/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}
/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
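
/* A minimal usage sketch (illustrative only): callers bracket a
   gimplification job with a matching push/pop pair, e.g.

     push_gimplify_context ();
     gimple_seq seq = NULL;
     gimplify_and_add (stmt_tree, &seq);
     pop_gimplify_context (NULL);

   where STMT_TREE stands for any GENERIC statement to be lowered.
   Temporaries created in between are recorded in the pushed context;
   passing a GIMPLE_BIND instead of NULL to the pop hands them to that
   bind, otherwise they land in the function's local_decls.  */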
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
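
/* A minimal sketch of the typical use (illustrative only): lowering
   the GENERIC assignment "x = a + b" appends one GIMPLE_ASSIGN to a
   sequence,

     tree sum = build2 (MODIFY_EXPR, TREE_TYPE (x), x,
			build2 (PLUS_EXPR, TREE_TYPE (x), a, b));
     gimple_seq seq = NULL;
     gimplify_and_add (sum, &seq);

   assuming X, A and B are register-type VAR_DECLs of the same type and
   a gimplify context has been pushed; gimplify_and_return_first would
   additionally hand back the generated statement.  */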
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
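
/* A minimal sketch of the difference (illustrative only):

     tree t1 = get_formal_tmp_var (val, &pre);
     tree t2 = get_initialized_tmp_var (val, &pre);

   With optimization enabled and VAL free of side effects, T1 may be
   the same temporary returned for an earlier operand_equal_p-equal
   VAL (via the formal temporary table above), which is only safe
   because callers promise the value cannot change between the
   initialization and use of the temporary; T2 is a fresh temporary
   every time.  */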
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions,
   it is probably easier for a front-end to unshare the expressions manually.
   On the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
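
/* A minimal sketch of the effect (illustrative only): suppose a
   front-end reuses one ARRAY_REF node REF for both occurrences in
   "a[i] = a[i] + 1":

     tree ref = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (a)),
			a, i, NULL_TREE, NULL_TREE);
     tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (ref), ref,
			 build2 (PLUS_EXPR, TREE_TYPE (ref), ref,
				 integer_one_node));

   The walk marks REF with TREE_VISITED on the first visit and copies
   it on the second, so in-place gimplification of the load cannot
   corrupt the store (DECLs and constants are allowed to stay shared;
   the copying applies to the expression nodes above them).  */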
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
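
/* A minimal sketch of what this achieves (illustrative only): given a
   GNU statement expression used as an initializer,

     int x = ({ int t; t = f (); t; });

   the BIND_EXPR wrapper has type int; voidify_wrapper_expr rewrites
   the last value-producing statement into "retval = t", gives every
   wrapper on the way down void type, and returns the "retval"
   temporary so the caller can use it in place of the wrapper.  */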
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
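
/* A minimal sketch of the resulting GIMPLE (illustrative only):

     saved_stack.1 = __builtin_stack_save ();
     ... block body containing VLA allocations ...
     __builtin_stack_restore (saved_stack.1);

   The temporary name is invented here; gimplify_bind_expr places the
   restore in the cleanup part of a GIMPLE_TRY_FINALLY so it runs on
   every exit from the block.  */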
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of the variable DECL.  The call is
   inserted at the location identified by the iterator IT; the BEFORE flag
   selects whether it goes before or after that statement.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
/* Generate an IFN_ASAN_MARK internal call that either poisons or unpoisons
   DECL, depending on the POISON flag.  The created statement is appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
   poisoning or unpoisoning it depending on the POISON flag.  The created
   statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable to
	 prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
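
/* A minimal sketch of the emitted GIMPLE (illustrative only): for a
   block-local "int buf[16];" with -fsanitize=address, the variable is
   unpoisoned on entry to its scope and poisoned again on exit:

     .ASAN_MARK (UNPOISON, &buf, 64);
     ... uses of buf ...
     .ASAN_MARK (POISON, &buf, 64);

   so later accesses through a dangling pointer hit poisoned shadow
   memory and produce a use-after-scope report.  */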
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
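
/* A minimal sketch of the lowering (illustrative only): a block with a
   VLA such as

     { int n = f (); char buf[n]; g (buf); }

   becomes a GIMPLE_BIND whose body is wrapped in a try/finally:

     gimple_bind vars: n, buf, saved_stack.1
       saved_stack.1 = __builtin_stack_save ();
       try
	 {
	   ... alloca for buf, call to g ...
	 }
       finally
	 {
	   __builtin_stack_restore (saved_stack.1);
	 }

   Memory-resident non-VLA locals additionally get a clobber assignment
   in the finally part.  Temporary names are invented for the example.  */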
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, the return is an early return,
     so add a PREDICT statement marking that path as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
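
/* A minimal sketch of the rewrite (illustrative only): for

     int f (int a) { return a + 1; }

   the RETURN_EXPR carries "RESULT_DECL = a + 1"; since the value is
   returned in a register, the RESULT_DECL is replaced by the shared
   return temporary, producing

     D.1234 = a + 1;
     return D.1234;

   with the same temporary reused by every return statement in the
   function (the name is invented for the example).  */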
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
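
/* A minimal sketch of the lowering (illustrative only): for a VLA
   "char buf[n];" the statements appended to SEQ_P amount to

     D.size = (sizetype) n;
     buf.1 = alloca-style builtin (D.size, align, max);

   and DECL_VALUE_EXPR (buf) is set to "*buf.1", so every later use of
   BUF gimplifies into an indirection through the pointer temporary.
   The temporary names are invented; the exact builtin chosen depends
   on build_alloca_call_expr.  */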
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}
/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (IDENTIFIER_LENGTH (DECL_NAME (decl)) + 1,
				      IDENTIFIER_POINTER (DECL_NAME (decl)));
  else
    {
      char *decl_name_anonymous = xasprintf ("D.%u", DECL_UID (decl));
      decl_name = build_string_literal (strlen (decl_name_anonymous) + 1,
					decl_name_anonymous);
      free (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
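
/* A minimal sketch of the emitted GIMPLE (illustrative only): with
   -ftrivial-auto-var-init=zero, an uninitialized "int i;" becomes

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   where 4 is the size in bytes, 2 is AUTO_INIT_ZERO and "i" is the
   variable name; the call is expanded into a real block initialization
   only after the uninitialized-use warnings have had a chance to run.  */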
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init with fewer initializers than members
   will initialize the rest of the aggregate as if it were static
   initialization.  In turn, static initialization guarantees that
   padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init', to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}
/* Return true if DECL needs to be automatically initialized by the
   compiler.  */
static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}
1847 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1848 and initialization explicit. */
1850 static enum gimplify_status
1851 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1853 tree stmt = *stmt_p;
1854 tree decl = DECL_EXPR_DECL (stmt);
1856 *stmt_p = NULL_TREE;
1858 if (TREE_TYPE (decl) == error_mark_node)
1859 return GS_ERROR;
1861 if ((TREE_CODE (decl) == TYPE_DECL
1862 || VAR_P (decl))
1863 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1865 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1866 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1867 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1870 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1871 in case its size expressions contain problematic nodes like CALL_EXPR. */
1872 if (TREE_CODE (decl) == TYPE_DECL
1873 && DECL_ORIGINAL_TYPE (decl)
1874 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1876 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1877 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1878 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1881 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1883 tree init = DECL_INITIAL (decl);
1884 bool is_vla = false;
1885 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1886 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1887 If the decl has VALUE_EXPR that was created by FE (usually
1888 C++FE), it's a proxy varaible, and FE already initialized
1889 the VALUE_EXPR of it, we should not initialize it anymore. */
1890 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1892 poly_uint64 size;
1893 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1894 || (!TREE_STATIC (decl)
1895 && flag_stack_check == GENERIC_STACK_CHECK
1896 && maybe_gt (size,
1897 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1899 gimplify_vla_decl (decl, seq_p);
1900 is_vla = true;
1903 if (asan_poisoned_variables
1904 && !is_vla
1905 && TREE_ADDRESSABLE (decl)
1906 && !TREE_STATIC (decl)
1907 && !DECL_HAS_VALUE_EXPR_P (decl)
1908 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1909 && dbg_cnt (asan_use_after_scope)
1910 && !gimplify_omp_ctxp
1911 /* GNAT introduces temporaries to hold return values of calls in
1912 initializers of variables defined in other units, so the
1913 declaration of the variable is discarded completely. We do not
1914 want to issue poison calls for such dropped variables. */
1915 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1916 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1918 asan_poisoned_variables->add (decl);
1919 asan_poison_variable (decl, false, seq_p);
1920 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1921 gimplify_ctxp->live_switch_vars->add (decl);
1924 /* Some front ends do not explicitly declare all anonymous
1925 artificial variables. We compensate here by declaring the
1926 variables, though it would be better if the front ends would
1927 explicitly declare them. */
1928 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1929 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1930 gimple_add_tmp_var (decl);
1932 if (init && init != error_mark_node)
1934 if (!TREE_STATIC (decl))
1936 DECL_INITIAL (decl) = NULL_TREE;
1937 init = build2 (INIT_EXPR, void_type_node, decl, init);
1938 gimplify_and_add (init, seq_p);
1939 ggc_free (init);
1940 /* Clear TREE_READONLY if we really have an initialization. */
1941 if (!DECL_INITIAL (decl)
1942 && !omp_privatize_by_reference (decl))
1943 TREE_READONLY (decl) = 0;
1945 else
1946 /* We must still examine initializers for static variables
1947 as they may contain a label address. */
1948 walk_tree (&init, force_labels_r, NULL, NULL);
1950 /* When there is no explicit initializer, if the user requested it,
1951 we should insert an artificial initializer for this automatic
1952 variable. */
1953 else if (is_var_need_auto_init (decl)
1954 && !decl_had_value_expr_p)
1956 gimple_add_init_for_auto_var (decl,
1957 flag_auto_var_init,
1958 seq_p);
1959 /* Expanding the call to the above .DEFERRED_INIT applies
1960 block initialization to the whole space covered by this variable.
1961 As a result, all the padding will be initialized to zeroes
1962 for zero initialization and to 0xFE byte-repeatable patterns for
1963 pattern initialization.
1964 In order to make the padding zero for pattern init, we
1965 should add a call to __builtin_clear_padding to clear the
1966 padding to zero, which is compatible with Clang.
1967 We cannot insert this call if the variable is a gimple register,
1968 since __builtin_clear_padding takes the address of the
1969 variable. As a result, if a long double/_Complex long double
1970 variable is spilled onto the stack later, its padding is 0xFE. */
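/* An illustrative sketch (exact .DEFERRED_INIT arguments depend on
   the target and the variable): on x86-64 with
   -ftrivial-auto-var-init=pattern, the declaration

     long double ld;

   is gimplified into roughly

     ld = .DEFERRED_INIT (16, 1, &"ld"[0]);
     __builtin_clear_padding (&ld, ...);

   so the six padding bytes of LD read as zeroes rather than 0xFE.  */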
1971 if (flag_auto_var_init == AUTO_INIT_PATTERN
1972 && !is_gimple_reg (decl)
1973 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1974 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1978 return GS_ALL_DONE;
1981 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1982 and replacing the LOOP_EXPR with goto, but if the loop contains an
1983 EXIT_EXPR, we need to append a label for it to jump to. */
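/* As a sketch, the lowering performed below is roughly

     LOOP_EXPR <body>

   becomes

     start_label:
       <body>
       goto start_label;
     exit_label:

   where EXIT_LABEL is only emitted if an EXIT_EXPR inside the body
   created gimplify_ctxp->exit_label.  */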
1985 static enum gimplify_status
1986 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1988 tree saved_label = gimplify_ctxp->exit_label;
1989 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1991 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1993 gimplify_ctxp->exit_label = NULL_TREE;
1995 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1997 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1999 if (gimplify_ctxp->exit_label)
2000 gimplify_seq_add_stmt (pre_p,
2001 gimple_build_label (gimplify_ctxp->exit_label));
2003 gimplify_ctxp->exit_label = saved_label;
2005 *expr_p = NULL;
2006 return GS_ALL_DONE;
2009 /* Gimplify a statement list onto a sequence. These may be created either
2010 by an enlightened front-end, or by shortcut_cond_expr. */
2012 static enum gimplify_status
2013 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2015 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2017 tree_stmt_iterator i = tsi_start (*expr_p);
2019 while (!tsi_end_p (i))
2021 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2022 tsi_delink (&i);
2025 if (temp)
2027 *expr_p = temp;
2028 return GS_OK;
2031 return GS_ALL_DONE;
2035 /* Emit a warning for the unreachable statement STMT if needed.
2036 Return the statement itself when the warning is emitted, otherwise
2037 return NULL. */
2038 static gimple *
2039 emit_warn_switch_unreachable (gimple *stmt)
2041 if (gimple_code (stmt) == GIMPLE_GOTO
2042 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2043 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2044 /* Don't warn for compiler-generated gotos. These occur
2045 in Duff's devices, for example. */
2046 return NULL;
2047 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2048 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2049 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2050 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2051 || (is_gimple_assign (stmt)
2052 && gimple_assign_single_p (stmt)
2053 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2054 && gimple_call_internal_p (
2055 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2056 IFN_DEFERRED_INIT))))
2057 /* Don't warn for compiler-generated initializations for
2058 -ftrivial-auto-var-init.
2059 There are 3 cases:
2060 case 1: a call to .DEFERRED_INIT;
2061 case 2: a call to __builtin_clear_padding whose 2nd argument is
2062 present and non-zero;
2063 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2064 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2065 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2066 i1 = _1. */
2067 return NULL;
2068 else
2069 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2070 "statement will never be executed");
2071 return stmt;
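/* For example (a sketch), with -Wswitch-unreachable:

     switch (i)
       {
         i = i + 1;
       case 0:
         break;
       }

   the increment is diagnosed with "statement will never be executed",
   since control jumps straight to a case label.  */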
2074 /* Callback for walk_gimple_seq. */
2076 static tree
2077 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2078 bool *handled_ops_p,
2079 struct walk_stmt_info *wi)
2081 gimple *stmt = gsi_stmt (*gsi_p);
2082 bool unreachable_issued = wi->info != NULL;
2084 *handled_ops_p = true;
2085 switch (gimple_code (stmt))
2087 case GIMPLE_TRY:
2088 /* A compiler-generated cleanup or a user-written try block.
2089 If it's empty, don't dive into it--that would result in
2090 worse location info. */
2091 if (gimple_try_eval (stmt) == NULL)
2093 if (warn_switch_unreachable && !unreachable_issued)
2094 wi->info = emit_warn_switch_unreachable (stmt);
2096 /* Stop if the auto-var-init warning is not enabled. */
2097 if (!warn_trivial_auto_var_init)
2098 return integer_zero_node;
2100 /* Fall through. */
2101 case GIMPLE_BIND:
2102 case GIMPLE_CATCH:
2103 case GIMPLE_EH_FILTER:
2104 case GIMPLE_TRANSACTION:
2105 /* Walk the sub-statements. */
2106 *handled_ops_p = false;
2107 break;
2109 case GIMPLE_DEBUG:
2110 /* Ignore these. We may generate them before declarations that
2111 are never executed. If there's something to warn about,
2112 there will be non-debug stmts too, and we'll catch those. */
2113 break;
2115 case GIMPLE_LABEL:
2116 /* Stop at the first label. */
2117 return integer_zero_node;
2118 case GIMPLE_CALL:
2119 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2121 *handled_ops_p = false;
2122 break;
2124 if (warn_trivial_auto_var_init
2125 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2126 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2128 /* Get the variable name from the 3rd argument of the call. */
2129 tree var_name = gimple_call_arg (stmt, 2);
2130 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2131 const char *var_name_str = TREE_STRING_POINTER (var_name);
2133 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2134 "%qs cannot be initialized with"
2135 "%<-ftrivial-auto-var_init%>",
2136 var_name_str);
2137 break;
2140 /* Fall through. */
2141 default:
2142 /* Check the first "real" statement (not a decl/lexical scope/...), and
2143 issue a warning if needed. */
2144 if (warn_switch_unreachable && !unreachable_issued)
2145 wi->info = emit_warn_switch_unreachable (stmt);
2146 /* Stop if the auto-var-init warning is not enabled. */
2147 if (!warn_trivial_auto_var_init)
2148 return integer_zero_node;
2149 break;
2151 return NULL_TREE;
2155 /* Possibly warn about unreachable statements between a switch's controlling
2156 expression and the first case. Also warn when -ftrivial-auto-var-init
2157 cannot initialize an auto variable in such a situation.
2158 SEQ is the body of a switch expression. */
2160 static void
2161 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2163 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2164 /* This warning doesn't play well with Fortran when optimizations
2165 are on. */
2166 || lang_GNU_Fortran ()
2167 || seq == NULL)
2168 return;
2170 struct walk_stmt_info wi;
2172 memset (&wi, 0, sizeof (wi));
2173 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2177 /* A label entry that pairs label and a location. */
2178 struct label_entry
2180 tree label;
2181 location_t loc;
2184 /* Find LABEL in vector of label entries VEC. */
2186 static struct label_entry *
2187 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2189 unsigned int i;
2190 struct label_entry *l;
2192 FOR_EACH_VEC_ELT (*vec, i, l)
2193 if (l->label == label)
2194 return l;
2195 return NULL;
2198 /* Return true if LABEL, a LABEL_DECL, represents a case label
2199 in a vector of labels CASES. */
2201 static bool
2202 case_label_p (const vec<tree> *cases, tree label)
2204 unsigned int i;
2205 tree l;
2207 FOR_EACH_VEC_ELT (*cases, i, l)
2208 if (CASE_LABEL (l) == label)
2209 return true;
2210 return false;
2213 /* Find the last nondebug statement in a scope STMT. */
2215 static gimple *
2216 last_stmt_in_scope (gimple *stmt)
2218 if (!stmt)
2219 return NULL;
2221 switch (gimple_code (stmt))
2223 case GIMPLE_BIND:
2225 gbind *bind = as_a <gbind *> (stmt);
2226 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2227 return last_stmt_in_scope (stmt);
2230 case GIMPLE_TRY:
2232 gtry *try_stmt = as_a <gtry *> (stmt);
2233 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2234 gimple *last_eval = last_stmt_in_scope (stmt);
2235 if (gimple_stmt_may_fallthru (last_eval)
2236 && (last_eval == NULL
2237 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2238 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2240 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2241 return last_stmt_in_scope (stmt);
2243 else
2244 return last_eval;
2247 case GIMPLE_DEBUG:
2248 gcc_unreachable ();
2250 default:
2251 return stmt;
2255 /* Collect labels that may fall through into LABELS and return the statement
2256 preceding another case label, or a user-defined label. Store a location
2257 useful to give warnings at *PREVLOC (usually the location of the returned
2258 statement or of its surrounding scope). */
2260 static gimple *
2261 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2262 auto_vec <struct label_entry> *labels,
2263 location_t *prevloc)
2265 gimple *prev = NULL;
2267 *prevloc = UNKNOWN_LOCATION;
2270 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2272 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2273 which starts on a GIMPLE_SWITCH and ends with a break label.
2274 Handle that as a single statement that can fall through. */
2275 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2276 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2277 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2278 if (last
2279 && gimple_code (first) == GIMPLE_SWITCH
2280 && gimple_code (last) == GIMPLE_LABEL)
2282 tree label = gimple_label_label (as_a <glabel *> (last));
2283 if (SWITCH_BREAK_LABEL_P (label))
2285 prev = bind;
2286 gsi_next (gsi_p);
2287 continue;
2291 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2292 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2294 /* Nested scope. Only look at the last statement of
2295 the innermost scope. */
2296 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2297 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2298 if (last)
2300 prev = last;
2301 /* It might be a label without a location. Use the
2302 location of the scope then. */
2303 if (!gimple_has_location (prev))
2304 *prevloc = bind_loc;
2306 gsi_next (gsi_p);
2307 continue;
2310 /* Ifs are tricky. */
2311 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2313 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2314 tree false_lab = gimple_cond_false_label (cond_stmt);
2315 location_t if_loc = gimple_location (cond_stmt);
2317 /* If we have e.g.
2318 if (i > 1) goto <D.2259>; else goto D;
2319 we can't do much with the else-branch. */
2320 if (!DECL_ARTIFICIAL (false_lab))
2321 break;
2323 /* Go on until the false label, then one step back. */
2324 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2326 gimple *stmt = gsi_stmt (*gsi_p);
2327 if (gimple_code (stmt) == GIMPLE_LABEL
2328 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2329 break;
2332 /* Not found? Oops. */
2333 if (gsi_end_p (*gsi_p))
2334 break;
2336 /* A dead label can't fall through. */
2337 if (!UNUSED_LABEL_P (false_lab))
2339 struct label_entry l = { false_lab, if_loc };
2340 labels->safe_push (l);
2343 /* Go to the last statement of the then branch. */
2344 gsi_prev (gsi_p);
2346 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2347 <D.1759>:
2348 <stmt>;
2349 goto <D.1761>;
2350 <D.1760>:
2352 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2353 && !gimple_has_location (gsi_stmt (*gsi_p)))
2355 /* Look at the statement before, it might be
2356 attribute fallthrough, in which case don't warn. */
2357 gsi_prev (gsi_p);
2358 bool fallthru_before_dest
2359 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2360 gsi_next (gsi_p);
2361 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2362 if (!fallthru_before_dest)
2364 struct label_entry l = { goto_dest, if_loc };
2365 labels->safe_push (l);
2368 /* This case is about
2369 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2370 <D.2022>:
2371 n = n + 1; // #1
2372 <D.2023>: // #2
2373 <D.1988>: // #3
2374 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2375 through to #3. So set PREV to #1. */
2376 else if (UNUSED_LABEL_P (false_lab))
2377 prev = gsi_stmt (*gsi_p);
2379 /* And move back. */
2380 gsi_next (gsi_p);
2383 /* Remember the last statement. Skip labels that are of no interest
2384 to us. */
2385 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2387 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2388 if (find_label_entry (labels, label))
2389 prev = gsi_stmt (*gsi_p);
2391 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2393 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2395 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2396 prev = gsi_stmt (*gsi_p);
2397 gsi_next (gsi_p);
2399 while (!gsi_end_p (*gsi_p)
2400 /* Stop if we find a case or a user-defined label. */
2401 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2402 || !gimple_has_location (gsi_stmt (*gsi_p))));
2404 if (prev && gimple_has_location (prev))
2405 *prevloc = gimple_location (prev);
2406 return prev;
2409 /* Return true if the switch fallthrough warning should occur. LABEL is
2410 the label statement that we're falling through to. */
2412 static bool
2413 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2415 gimple_stmt_iterator gsi = *gsi_p;
2417 /* Don't warn if the label is marked with a "falls through" comment. */
2418 if (FALLTHROUGH_LABEL_P (label))
2419 return false;
2421 /* Don't warn for non-case labels followed by a statement:
2422 case 0:
2423 foo ();
2424 label:
2425 bar ();
2426 as these are likely intentional. */
2427 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2429 tree l;
2430 while (!gsi_end_p (gsi)
2431 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2432 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2433 && !case_label_p (&gimplify_ctxp->case_labels, l))
2434 gsi_next_nondebug (&gsi);
2435 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2436 return false;
2439 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2440 immediately breaks. */
2441 gsi = *gsi_p;
2443 /* Skip all immediately following labels. */
2444 while (!gsi_end_p (gsi)
2445 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2446 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2447 gsi_next_nondebug (&gsi);
2449 /* { ... something; default:; } */
2450 if (gsi_end_p (gsi)
2451 /* { ... something; default: break; } or
2452 { ... something; default: goto L; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2454 /* { ... something; default: return; } */
2455 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2456 return false;
2458 return true;
2461 /* Callback for walk_gimple_seq. */
2463 static tree
2464 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2465 struct walk_stmt_info *)
2467 gimple *stmt = gsi_stmt (*gsi_p);
2469 *handled_ops_p = true;
2470 switch (gimple_code (stmt))
2472 case GIMPLE_TRY:
2473 case GIMPLE_BIND:
2474 case GIMPLE_CATCH:
2475 case GIMPLE_EH_FILTER:
2476 case GIMPLE_TRANSACTION:
2477 /* Walk the sub-statements. */
2478 *handled_ops_p = false;
2479 break;
2481 /* Find a sequence of form:
2483 GIMPLE_LABEL
2484 [...]
2485 <may fallthru stmt>
2486 GIMPLE_LABEL
2488 and possibly warn. */
2489 case GIMPLE_LABEL:
2491 /* Found a label. Skip all immediately following labels. */
2492 while (!gsi_end_p (*gsi_p)
2493 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2494 gsi_next_nondebug (gsi_p);
2496 /* There might be no more statements. */
2497 if (gsi_end_p (*gsi_p))
2498 return integer_zero_node;
2500 /* Vector of labels that fall through. */
2501 auto_vec <struct label_entry> labels;
2502 location_t prevloc;
2503 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2505 /* There might be no more statements. */
2506 if (gsi_end_p (*gsi_p))
2507 return integer_zero_node;
2509 gimple *next = gsi_stmt (*gsi_p);
2510 tree label;
2511 /* If what follows is a label, then we may have a fallthrough. */
2512 if (gimple_code (next) == GIMPLE_LABEL
2513 && gimple_has_location (next)
2514 && (label = gimple_label_label (as_a <glabel *> (next)))
2515 && prev != NULL)
2517 struct label_entry *l;
2518 bool warned_p = false;
2519 auto_diagnostic_group d;
2520 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2521 /* Quiet. */;
2522 else if (gimple_code (prev) == GIMPLE_LABEL
2523 && (label = gimple_label_label (as_a <glabel *> (prev)))
2524 && (l = find_label_entry (&labels, label)))
2525 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2526 "this statement may fall through");
2527 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2528 /* Try to be clever and don't warn when the statement
2529 can't actually fall through. */
2530 && gimple_stmt_may_fallthru (prev)
2531 && prevloc != UNKNOWN_LOCATION)
2532 warned_p = warning_at (prevloc,
2533 OPT_Wimplicit_fallthrough_,
2534 "this statement may fall through");
2535 if (warned_p)
2536 inform (gimple_location (next), "here");
2538 /* Mark this label as processed so as to prevent multiple
2539 warnings in nested switches. */
2540 FALLTHROUGH_LABEL_P (label) = true;
2542 /* So that next warn_implicit_fallthrough_r will start looking for
2543 a new sequence starting with this label. */
2544 gsi_prev (gsi_p);
2547 break;
2548 default:
2549 break;
2551 return NULL_TREE;
2554 /* Warn when a switch case falls through. */
2556 static void
2557 maybe_warn_implicit_fallthrough (gimple_seq seq)
2559 if (!warn_implicit_fallthrough)
2560 return;
2562 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2563 if (!(lang_GNU_C ()
2564 || lang_GNU_CXX ()
2565 || lang_GNU_OBJC ()))
2566 return;
2568 struct walk_stmt_info wi;
2569 memset (&wi, 0, sizeof (wi));
2570 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2573 /* Callback for walk_gimple_seq. */
2575 static tree
2576 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2577 struct walk_stmt_info *wi)
2579 gimple *stmt = gsi_stmt (*gsi_p);
2581 *handled_ops_p = true;
2582 switch (gimple_code (stmt))
2584 case GIMPLE_TRY:
2585 case GIMPLE_BIND:
2586 case GIMPLE_CATCH:
2587 case GIMPLE_EH_FILTER:
2588 case GIMPLE_TRANSACTION:
2589 /* Walk the sub-statements. */
2590 *handled_ops_p = false;
2591 break;
2592 case GIMPLE_CALL:
2593 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2595 gsi_remove (gsi_p, true);
2596 if (gsi_end_p (*gsi_p))
2598 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2599 return integer_zero_node;
2602 bool found = false;
2603 location_t loc = gimple_location (stmt);
2605 gimple_stmt_iterator gsi2 = *gsi_p;
2606 stmt = gsi_stmt (gsi2);
2607 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2609 /* Go on until the artificial label. */
2610 tree goto_dest = gimple_goto_dest (stmt);
2611 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2613 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2614 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2615 == goto_dest)
2616 break;
2619 /* Not found? Stop. */
2620 if (gsi_end_p (gsi2))
2621 break;
2623 /* Look one past it. */
2624 gsi_next (&gsi2);
2627 /* We're looking for a case label or default label here. */
2628 while (!gsi_end_p (gsi2))
2630 stmt = gsi_stmt (gsi2);
2631 if (gimple_code (stmt) == GIMPLE_LABEL)
2633 tree label = gimple_label_label (as_a <glabel *> (stmt));
2634 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2636 found = true;
2637 break;
2640 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2642 else if (!is_gimple_debug (stmt))
2643 /* Anything else is not expected. */
2644 break;
2645 gsi_next (&gsi2);
2647 if (!found)
2648 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2649 "a case label or default label");
2651 break;
2652 default:
2653 break;
2655 return NULL_TREE;
2658 /* Expand all FALLTHROUGH () calls in SEQ. */
2660 static void
2661 expand_FALLTHROUGH (gimple_seq *seq_p)
2663 struct walk_stmt_info wi;
2664 location_t loc;
2665 memset (&wi, 0, sizeof (wi));
2666 wi.info = (void *) &loc;
2667 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2668 if (wi.callback_result == integer_zero_node)
2669 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2670 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2671 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2672 "a case label or default label");
2676 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2677 branch to. */
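/* For example (a rough sketch of the result),

     switch (x) { case 1: f (); break; default: g (); }

   is gimplified into approximately

     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>: f (); goto <D.2>;
     <D.3>: g ();
     <D.2>:
*/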
2679 static enum gimplify_status
2680 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2682 tree switch_expr = *expr_p;
2683 gimple_seq switch_body_seq = NULL;
2684 enum gimplify_status ret;
2685 tree index_type = TREE_TYPE (switch_expr);
2686 if (index_type == NULL_TREE)
2687 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2689 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2690 fb_rvalue);
2691 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2692 return ret;
2694 if (SWITCH_BODY (switch_expr))
2696 vec<tree> labels;
2697 vec<tree> saved_labels;
2698 hash_set<tree> *saved_live_switch_vars = NULL;
2699 tree default_case = NULL_TREE;
2700 gswitch *switch_stmt;
2702 /* Save old labels, get new ones from body, then restore the old
2703 labels. Save all the things from the switch body to append after. */
2704 saved_labels = gimplify_ctxp->case_labels;
2705 gimplify_ctxp->case_labels.create (8);
2707 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2708 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2709 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2710 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2711 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2712 else
2713 gimplify_ctxp->live_switch_vars = NULL;
2715 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2716 gimplify_ctxp->in_switch_expr = true;
2718 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2720 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2721 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2722 maybe_warn_implicit_fallthrough (switch_body_seq);
2723 /* Only do this for the outermost GIMPLE_SWITCH. */
2724 if (!gimplify_ctxp->in_switch_expr)
2725 expand_FALLTHROUGH (&switch_body_seq);
2727 labels = gimplify_ctxp->case_labels;
2728 gimplify_ctxp->case_labels = saved_labels;
2730 if (gimplify_ctxp->live_switch_vars)
2732 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2733 delete gimplify_ctxp->live_switch_vars;
2735 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2737 preprocess_case_label_vec_for_gimple (labels, index_type,
2738 &default_case);
2740 bool add_bind = false;
2741 if (!default_case)
2743 glabel *new_default;
2745 default_case
2746 = build_case_label (NULL_TREE, NULL_TREE,
2747 create_artificial_label (UNKNOWN_LOCATION));
2748 if (old_in_switch_expr)
2750 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2751 add_bind = true;
2753 new_default = gimple_build_label (CASE_LABEL (default_case));
2754 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2756 else if (old_in_switch_expr)
2758 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2759 if (last && gimple_code (last) == GIMPLE_LABEL)
2761 tree label = gimple_label_label (as_a <glabel *> (last));
2762 if (SWITCH_BREAK_LABEL_P (label))
2763 add_bind = true;
2767 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2768 default_case, labels);
2769 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2770 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2771 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2772 so that we can easily find the start and end of the switch
2773 statement. */
2774 if (add_bind)
2776 gimple_seq bind_body = NULL;
2777 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2778 gimple_seq_add_seq (&bind_body, switch_body_seq);
2779 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2780 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2781 gimplify_seq_add_stmt (pre_p, bind);
2783 else
2785 gimplify_seq_add_stmt (pre_p, switch_stmt);
2786 gimplify_seq_add_seq (pre_p, switch_body_seq);
2788 labels.release ();
2790 else
2791 gcc_unreachable ();
2793 return GS_ALL_DONE;
2796 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2798 static enum gimplify_status
2799 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2801 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2802 == current_function_decl);
2804 tree label = LABEL_EXPR_LABEL (*expr_p);
2805 glabel *label_stmt = gimple_build_label (label);
2806 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2807 gimplify_seq_add_stmt (pre_p, label_stmt);
2809 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2810 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2811 NOT_TAKEN));
2812 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2813 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2814 TAKEN));
2816 return GS_ALL_DONE;
2819 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2821 static enum gimplify_status
2822 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2824 struct gimplify_ctx *ctxp;
2825 glabel *label_stmt;
2827 /* Invalid programs can play Duff's Device type games with, for example,
2828 #pragma omp parallel. At least in the C front end, we don't
2829 detect such invalid branches until after gimplification, in the
2830 diagnose_omp_blocks pass. */
2831 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2832 if (ctxp->case_labels.exists ())
2833 break;
2835 tree label = CASE_LABEL (*expr_p);
2836 label_stmt = gimple_build_label (label);
2837 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2838 ctxp->case_labels.safe_push (*expr_p);
2839 gimplify_seq_add_stmt (pre_p, label_stmt);
2841 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2842 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2843 NOT_TAKEN));
2844 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2845 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2846 TAKEN));
2848 return GS_ALL_DONE;
2851 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2852 if necessary. */
2854 tree
2855 build_and_jump (tree *label_p)
2857 if (label_p == NULL)
2858 /* If there's nowhere to jump, just fall through. */
2859 return NULL_TREE;
2861 if (*label_p == NULL_TREE)
2863 tree label = create_artificial_label (UNKNOWN_LOCATION);
2864 *label_p = label;
2867 return build1 (GOTO_EXPR, void_type_node, *label_p);
2870 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2871 This also involves building a label to jump to and communicating it to
2872 gimplify_loop_expr through gimplify_ctxp->exit_label. */
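/* Sketch: EXIT_EXPR <cond> becomes

     if (cond) goto exit_label;

   where EXIT_LABEL is created on demand and later emitted after the
   loop body by gimplify_loop_expr.  */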
2874 static enum gimplify_status
2875 gimplify_exit_expr (tree *expr_p)
2877 tree cond = TREE_OPERAND (*expr_p, 0);
2878 tree expr;
2880 expr = build_and_jump (&gimplify_ctxp->exit_label);
2881 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2882 *expr_p = expr;
2884 return GS_OK;
2887 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2888 different from its canonical type, wrap the whole thing inside a
2889 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2890 type.
2892 The canonical type of a COMPONENT_REF is the type of the field being
2893 referenced--unless the field is a bit-field which can be read directly
2894 in a smaller mode, in which case the canonical type is the
2895 sign-appropriate type corresponding to that mode. */
2897 static void
2898 canonicalize_component_ref (tree *expr_p)
2900 tree expr = *expr_p;
2901 tree type;
2903 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2905 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2906 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2907 else
2908 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2910 /* One could argue that all the stuff below is not necessary for
2911 the non-bitfield case and declare it a FE error if type
2912 adjustment would be needed. */
2913 if (TREE_TYPE (expr) != type)
2915 #ifdef ENABLE_TYPES_CHECKING
2916 tree old_type = TREE_TYPE (expr);
2917 #endif
2918 int type_quals;
2920 /* We need to preserve qualifiers and propagate them from
2921 operand 0. */
2922 type_quals = TYPE_QUALS (type)
2923 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2924 if (TYPE_QUALS (type) != type_quals)
2925 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2927 /* Set the type of the COMPONENT_REF to the underlying type. */
2928 TREE_TYPE (expr) = type;
2930 #ifdef ENABLE_TYPES_CHECKING
2931 /* It is now a FE error, if the conversion from the canonical
2932 type to the original expression type is not useless. */
2933 gcc_assert (useless_type_conversion_p (old_type, type));
2934 #endif
2938 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2939 to foo, embed that change in the ADDR_EXPR by converting
2940 T array[U];
2941 (T *)&array
2943 &array[L]
2944 where L is the lower bound. For simplicity, only do this for constant
2945 lower bound.
2946 The constraint is that the type of &array[L] is trivially convertible
2947 to T *. */
2949 static void
2950 canonicalize_addr_expr (tree *expr_p)
2952 tree expr = *expr_p;
2953 tree addr_expr = TREE_OPERAND (expr, 0);
2954 tree datype, ddatype, pddatype;
2956 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2957 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2958 || TREE_CODE (addr_expr) != ADDR_EXPR)
2959 return;
2961 /* The addr_expr type should be a pointer to an array. */
2962 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2963 if (TREE_CODE (datype) != ARRAY_TYPE)
2964 return;
2966 /* The pointer to element type shall be trivially convertible to
2967 the expression pointer type. */
2968 ddatype = TREE_TYPE (datype);
2969 pddatype = build_pointer_type (ddatype);
2970 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2971 pddatype))
2972 return;
2974 /* The lower bound and element sizes must be constant. */
2975 if (!TYPE_SIZE_UNIT (ddatype)
2976 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2977 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2978 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2979 return;
2981 /* All checks succeeded. Build a new node to merge the cast. */
2982 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2983 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2984 NULL_TREE, NULL_TREE);
2985 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2987 /* We can have stripped a required restrict qualifier above. */
2988 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2989 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2992 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2993 underneath as appropriate. */
2995 static enum gimplify_status
2996 gimplify_conversion (tree *expr_p)
2998 location_t loc = EXPR_LOCATION (*expr_p);
2999 gcc_assert (CONVERT_EXPR_P (*expr_p));
3001 /* Then strip away all but the outermost conversion. */
3002 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3004 /* And remove the outermost conversion if it's useless. */
3005 if (tree_ssa_useless_type_conversion (*expr_p))
3006 *expr_p = TREE_OPERAND (*expr_p, 0);
3008 /* If we still have a conversion at the toplevel,
3009 then canonicalize some constructs. */
3010 if (CONVERT_EXPR_P (*expr_p))
3012 tree sub = TREE_OPERAND (*expr_p, 0);
3014 /* If a NOP conversion is changing the type of a COMPONENT_REF
3015 expression, then canonicalize its type now in order to expose more
3016 redundant conversions. */
3017 if (TREE_CODE (sub) == COMPONENT_REF)
3018 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3020 /* If a NOP conversion is changing a pointer to array of foo
3021 to a pointer to foo, embed that change in the ADDR_EXPR. */
3022 else if (TREE_CODE (sub) == ADDR_EXPR)
3023 canonicalize_addr_expr (expr_p);
3026 /* If we have a conversion to a non-register type force the
3027 use of a VIEW_CONVERT_EXPR instead. */
3028 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3029 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3030 TREE_OPERAND (*expr_p, 0));
3032 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3033 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3034 TREE_SET_CODE (*expr_p, NOP_EXPR);
3036 return GS_OK;
3039 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3040 DECL_VALUE_EXPR, and it's worth re-examining things. */
3042 static enum gimplify_status
3043 gimplify_var_or_parm_decl (tree *expr_p)
3045 tree decl = *expr_p;
3047 /* ??? If this is a local variable, and it has not been seen in any
3048 outer BIND_EXPR, then it's probably the result of a duplicate
3049 declaration, for which we've already issued an error. It would
3050 be really nice if the front end wouldn't leak these at all.
3051 Currently the only known culprit is C++ destructors, as seen
3052 in g++.old-deja/g++.jason/binding.C.
3053 Other possible culprits are size expressions for variably modified
3054 types which are lost in the FE or not gimplified correctly. */
3055 if (VAR_P (decl)
3056 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3057 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3058 && decl_function_context (decl) == current_function_decl)
3060 gcc_assert (seen_error ());
3061 return GS_ERROR;
3064 /* When within an OMP context, notice uses of variables. */
3065 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3066 return GS_ALL_DONE;
3068 /* If the decl is an alias for another expression, substitute it now. */
3069 if (DECL_HAS_VALUE_EXPR_P (decl))
3071 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3072 return GS_OK;
3075 return GS_ALL_DONE;
3078 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3080 static void
3081 recalculate_side_effects (tree t)
3083 enum tree_code code = TREE_CODE (t);
3084 int len = TREE_OPERAND_LENGTH (t);
3085 int i;
3087 switch (TREE_CODE_CLASS (code))
3089 case tcc_expression:
3090 switch (code)
3092 case INIT_EXPR:
3093 case MODIFY_EXPR:
3094 case VA_ARG_EXPR:
3095 case PREDECREMENT_EXPR:
3096 case PREINCREMENT_EXPR:
3097 case POSTDECREMENT_EXPR:
3098 case POSTINCREMENT_EXPR:
3099 /* All of these have side-effects, no matter what their
3100 operands are. */
3101 return;
3103 default:
3104 break;
3106 /* Fall through. */
3108 case tcc_comparison: /* a comparison expression */
3109 case tcc_unary: /* a unary arithmetic expression */
3110 case tcc_binary: /* a binary arithmetic expression */
3111 case tcc_reference: /* a reference */
3112 case tcc_vl_exp: /* a function call */
3113 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3114 for (i = 0; i < len; ++i)
3116 tree op = TREE_OPERAND (t, i);
3117 if (op && TREE_SIDE_EFFECTS (op))
3118 TREE_SIDE_EFFECTS (t) = 1;
3120 break;
3122 case tcc_constant:
3123 /* No side-effects. */
3124 return;
3126 default:
3127 gcc_unreachable ();
3131 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3132 node *EXPR_P.
3134 compound_lval
3135 : min_lval '[' val ']'
3136 | min_lval '.' ID
3137 | compound_lval '[' val ']'
3138 | compound_lval '.' ID
3140 This is not part of the original SIMPLE definition, which separates
3141 array and member references, but it seems reasonable to handle them
3142 together. Also, this way we don't run into problems with union
3143 aliasing; gcc requires that for accesses through a union to alias, the
3144 union reference must be explicit, which was not always the case when we
3145 were splitting up array and member refs.
3147 PRE_P points to the sequence where side effects that must happen before
3148 *EXPR_P should be stored.
3150 POST_P points to the sequence where side effects that must happen after
3151 *EXPR_P should be stored. */
3153 static enum gimplify_status
3154 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3155 fallback_t fallback)
3157 tree *p;
3158 enum gimplify_status ret = GS_ALL_DONE, tret;
3159 int i;
3160 location_t loc = EXPR_LOCATION (*expr_p);
3161 tree expr = *expr_p;
3163 /* Create a stack of the subexpressions so later we can walk them in
3164 order from inner to outer. */
3165 auto_vec<tree, 10> expr_stack;
3167 /* We can handle anything that get_inner_reference can deal with. */
3168 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3170 restart:
3171 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3172 if (TREE_CODE (*p) == INDIRECT_REF)
3173 *p = fold_indirect_ref_loc (loc, *p);
3175 if (handled_component_p (*p))
3177 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3178 additional COMPONENT_REFs. */
3179 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3180 && gimplify_var_or_parm_decl (p) == GS_OK)
3181 goto restart;
3182 else
3183 break;
3185 expr_stack.safe_push (*p);
3188 gcc_assert (expr_stack.length ());
3190 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3191 walked through and P points to the innermost expression.
3193 Java requires that we elaborate nodes in source order. That
3194 means we must gimplify the inner expression followed by each of
3195 the indices, in order. But we can't gimplify the inner
3196 expression until we deal with any variable bounds, sizes, or
3197 positions in order to deal with PLACEHOLDER_EXPRs.
3199 The base expression may contain a statement expression that
3200 has declarations used in size expressions, so has to be
3201 gimplified before gimplifying the size expressions.
3203 So we do this in three steps. First we deal with variable
3204 bounds, sizes, and positions, then we gimplify the base and
3205 ensure it is memory if needed, then we deal with the annotations
3206 for any variables in the components and any indices, from left
3207 to right. */
3209 bool need_non_reg = false;
3210 for (i = expr_stack.length () - 1; i >= 0; i--)
3212 tree t = expr_stack[i];
3214 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3216 /* Deal with the low bound and element type size and put them into
3217 the ARRAY_REF. If these values are set, they have already been
3218 gimplified. */
3219 if (TREE_OPERAND (t, 2) == NULL_TREE)
3221 tree low = unshare_expr (array_ref_low_bound (t));
3222 if (!is_gimple_min_invariant (low))
3224 TREE_OPERAND (t, 2) = low;
3228 if (TREE_OPERAND (t, 3) == NULL_TREE)
3230 tree elmt_size = array_ref_element_size (t);
3231 if (!is_gimple_min_invariant (elmt_size))
3233 elmt_size = unshare_expr (elmt_size);
3234 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3235 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3237 /* Divide the element size by the alignment of the element
3238 type (above). */
3239 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3240 elmt_size, factor);
3242 TREE_OPERAND (t, 3) = elmt_size;
3245 need_non_reg = true;
3247 else if (TREE_CODE (t) == COMPONENT_REF)
3249 /* Set the field offset into T and gimplify it. */
3250 if (TREE_OPERAND (t, 2) == NULL_TREE)
3252 tree offset = component_ref_field_offset (t);
3253 if (!is_gimple_min_invariant (offset))
3255 offset = unshare_expr (offset);
3256 tree field = TREE_OPERAND (t, 1);
3257 tree factor
3258 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3260 /* Divide the offset by its alignment. */
3261 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3262 offset, factor);
3264 TREE_OPERAND (t, 2) = offset;
3267 need_non_reg = true;
3271 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3272 so as to match the min_lval predicate. Failure to do so may result
3273 in the creation of large aggregate temporaries. */
3274 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3275 fallback | fb_lvalue);
3276 ret = MIN (ret, tret);
3278 /* Step 2a: if we have component references we do not support on
3279 registers then make sure the base isn't a register. Of course
3280 we can only do so if an rvalue is OK. */
3281 if (need_non_reg && (fallback & fb_rvalue))
3282 prepare_gimple_addressable (p, pre_p);
3284 /* Step 3: gimplify size expressions and the indices and operands of
3285 ARRAY_REF. During this loop we also remove any useless conversions. */
3287 for (; expr_stack.length () > 0; )
3289 tree t = expr_stack.pop ();
3291 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3293 /* Gimplify the low bound and element type size. */
3294 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3295 is_gimple_reg, fb_rvalue);
3296 ret = MIN (ret, tret);
3298 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3299 is_gimple_reg, fb_rvalue);
3300 ret = MIN (ret, tret);
3302 /* Gimplify the dimension. */
3303 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3304 is_gimple_val, fb_rvalue);
3305 ret = MIN (ret, tret);
3307 else if (TREE_CODE (t) == COMPONENT_REF)
3309 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3310 is_gimple_reg, fb_rvalue);
3311 ret = MIN (ret, tret);
3314 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3316 /* The innermost expression P may have originally had
3317 TREE_SIDE_EFFECTS set which would have caused all the outer
3318 expressions in *EXPR_P leading to P to also have had
3319 TREE_SIDE_EFFECTS set. */
3320 recalculate_side_effects (t);
3323 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3324 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3326 canonicalize_component_ref (expr_p);
3329 expr_stack.release ();
3331 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3333 return ret;
3336 /* Gimplify the self modifying expression pointed to by EXPR_P
3337 (++, --, +=, -=).
3339 PRE_P points to the list where side effects that must happen before
3340 *EXPR_P should be stored.
3342 POST_P points to the list where side effects that must happen after
3343 *EXPR_P should be stored.
3345 WANT_VALUE is nonzero iff we want to use the value of this expression
3346 in another expression.
3348 ARITH_TYPE is the type the computation should be performed in. */
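/* A sketch of the postfix case when the value is used:

     use (i++);

   becomes roughly

     i.0 = i;
     i = i.0 + 1;
     use (i.0);

   where i.0 is a temporary holding the original value.  */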
3350 enum gimplify_status
3351 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3352 bool want_value, tree arith_type)
3354 enum tree_code code;
3355 tree lhs, lvalue, rhs, t1;
3356 gimple_seq post = NULL, *orig_post_p = post_p;
3357 bool postfix;
3358 enum tree_code arith_code;
3359 enum gimplify_status ret;
3360 location_t loc = EXPR_LOCATION (*expr_p);
3362 code = TREE_CODE (*expr_p);
3364 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3365 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3367 /* Prefix or postfix? */
3368 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3369 /* Faster to treat as prefix if result is not used. */
3370 postfix = want_value;
3371 else
3372 postfix = false;
3374 /* For postfix, make sure the inner expression's post side effects
3375 are executed after side effects from this expression. */
3376 if (postfix)
3377 post_p = &post;
3379 /* Add or subtract? */
3380 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3381 arith_code = PLUS_EXPR;
3382 else
3383 arith_code = MINUS_EXPR;
3385 /* Gimplify the LHS into a GIMPLE lvalue. */
3386 lvalue = TREE_OPERAND (*expr_p, 0);
3387 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3388 if (ret == GS_ERROR)
3389 return ret;
3391 /* Extract the operands to the arithmetic operation. */
3392 lhs = lvalue;
3393 rhs = TREE_OPERAND (*expr_p, 1);
3395 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3396 that as the result value and in the postqueue operation. */
3397 if (postfix)
3399 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3400 if (ret == GS_ERROR)
3401 return ret;
3403 lhs = get_initialized_tmp_var (lhs, pre_p);
3406 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3407 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3409 rhs = convert_to_ptrofftype_loc (loc, rhs);
3410 if (arith_code == MINUS_EXPR)
3411 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3412 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3414 else
3415 t1 = fold_convert (TREE_TYPE (*expr_p),
3416 fold_build2 (arith_code, arith_type,
3417 fold_convert (arith_type, lhs),
3418 fold_convert (arith_type, rhs)));
3420 if (postfix)
3422 gimplify_assign (lvalue, t1, pre_p);
3423 gimplify_seq_add_seq (orig_post_p, post);
3424 *expr_p = lhs;
3425 return GS_ALL_DONE;
3427 else
3429 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3430 return GS_OK;
3434 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3436 static void
3437 maybe_with_size_expr (tree *expr_p)
3439 tree expr = *expr_p;
3440 tree type = TREE_TYPE (expr);
3441 tree size;
3443 /* If we've already wrapped this or the type is error_mark_node, we can't do
3444 anything. */
3445 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3446 || type == error_mark_node)
3447 return;
3449 /* If the size isn't known or is a constant, we have nothing to do. */
3450 size = TYPE_SIZE_UNIT (type);
3451 if (!size || poly_int_tree_p (size))
3452 return;
3454 /* Otherwise, make a WITH_SIZE_EXPR. */
3455 size = unshare_expr (size);
3456 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3457 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
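/* An illustrative sketch: for an object OBJ whose type has a
   non-constant size expression N (e.g. a VLA-typed value), a value
   use of OBJ is rewritten as WITH_SIZE_EXPR <obj, n> so the dynamic
   size remains available to later gimplification, for instance when
   the object is passed as a call argument (see gimplify_arg).  */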
3460 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3461 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3462 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3463 gimplified to an SSA name. */
3465 enum gimplify_status
3466 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3467 bool allow_ssa)
3469 bool (*test) (tree);
3470 fallback_t fb;
3472 /* In general, we allow lvalues for function arguments to avoid
3473 extra overhead of copying large aggregates out of even larger
3474 aggregates into temporaries only to copy the temporaries to
3475 the argument list. Make optimizers happy by pulling out to
3476 temporaries those types that fit in registers. */
3477 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3478 test = is_gimple_val, fb = fb_rvalue;
3479 else
3481 test = is_gimple_lvalue, fb = fb_either;
3482 /* Also strip a TARGET_EXPR that would force an extra copy. */
3483 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3485 tree init = TARGET_EXPR_INITIAL (*arg_p);
3486 if (init
3487 && !VOID_TYPE_P (TREE_TYPE (init)))
3488 *arg_p = init;
3492 /* If this is a variable sized type, we must remember the size. */
3493 maybe_with_size_expr (arg_p);
3495 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3496 /* Make sure arguments have the same location as the function call
3497 itself. */
3498 protected_set_expr_location (*arg_p, call_location);
3500 /* There is a sequence point before a function call. Side effects in
3501 the argument list must occur before the actual call. So, when
3502 gimplifying arguments, force gimplify_expr to use an internal
3503 post queue which is then appended to the end of PRE_P. */
3504 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3507 /* Don't fold inside offloading or taskreg regions: it can break code by
3508 adding decl references that weren't in the source. We'll do it during
3509 the omplower pass instead. */
3511 static bool
3512 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3514 struct gimplify_omp_ctx *ctx;
3515 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3516 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3517 return false;
3518 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3519 return false;
3520 /* Delay folding of builtins until the IL is in consistent state
3521 so the diagnostic machinery can do a better job. */
3522 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3523 return false;
3524 return fold_stmt (gsi);
3527 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3528 WANT_VALUE is true if the result of the call is desired. */
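/* As a sketch, gimplifying

     x = f (g (y), h (y));

   forces the argument side effects into the pre-queue, yielding
   roughly

     t1 = g (y);
     t2 = h (y);
     x = f (t1, t2);

   with T1 and T2 as temporaries (SSA names where allowed).  */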
3530 static enum gimplify_status
3531 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3533 tree fndecl, parms, p, fnptrtype;
3534 enum gimplify_status ret;
3535 int i, nargs;
3536 gcall *call;
3537 bool builtin_va_start_p = false;
3538 location_t loc = EXPR_LOCATION (*expr_p);
3540 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3542 /* For reliable diagnostics during inlining, it is necessary that
3543 every call_expr be annotated with file and line. */
3544 if (! EXPR_HAS_LOCATION (*expr_p))
3545 SET_EXPR_LOCATION (*expr_p, input_location);
3547 /* Gimplify internal functions created in the FEs. */
3548 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3550 if (want_value)
3551 return GS_ALL_DONE;
3553 nargs = call_expr_nargs (*expr_p);
3554 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3555 auto_vec<tree> vargs (nargs);
3557 if (ifn == IFN_ASSUME)
3559 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3561 /* If the [[assume (cond)]]; condition is simple
3562 enough and can be evaluated unconditionally
3563 without side-effects, expand it as
3564 if (!cond) __builtin_unreachable (); */
3565 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3566 *expr_p = build3 (COND_EXPR, void_type_node,
3567 CALL_EXPR_ARG (*expr_p, 0), void_node,
3568 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3569 fndecl, 0));
3570 return GS_OK;
3572 /* FIXME: Otherwise expand it specially. */
3573 return GS_ALL_DONE;
3576 for (i = 0; i < nargs; i++)
3578 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3579 EXPR_LOCATION (*expr_p));
3580 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3583 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3584 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3585 gimplify_seq_add_stmt (pre_p, call);
3586 return GS_ALL_DONE;
3589 /* This may be a call to a builtin function.
3591 Builtin function calls may be transformed into different
3592 (and more efficient) builtin function calls under certain
3593 circumstances. Unfortunately, gimplification can muck things
3594 up enough that the builtin expanders are not aware that certain
3595 transformations are still valid.
3597 So we attempt transformation/gimplification of the call before
3598 we gimplify the CALL_EXPR. At this time we do not manage to
3599 transform all calls in the same manner as the expanders do, but
3600 we do transform most of them. */
3601 fndecl = get_callee_fndecl (*expr_p);
3602 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3603 switch (DECL_FUNCTION_CODE (fndecl))
3605 CASE_BUILT_IN_ALLOCA:
3606 /* If the call has been built for a variable-sized object, then we
3607 want to restore the stack level when the enclosing BIND_EXPR is
3608 exited to reclaim the allocated space; otherwise, we precisely
3609 need to do the opposite and preserve the latest stack level. */
3610 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3611 gimplify_ctxp->save_stack = true;
3612 else
3613 gimplify_ctxp->keep_stack = true;
3614 break;
3616 case BUILT_IN_VA_START:
3618 builtin_va_start_p = TRUE;
3619 if (call_expr_nargs (*expr_p) < 2)
3621 error ("too few arguments to function %<va_start%>");
3622 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3623 return GS_OK;
3626 if (fold_builtin_next_arg (*expr_p, true))
3628 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3629 return GS_OK;
3631 break;
3634 case BUILT_IN_EH_RETURN:
3635 cfun->calls_eh_return = true;
3636 break;
3638 case BUILT_IN_CLEAR_PADDING:
3639 if (call_expr_nargs (*expr_p) == 1)
3641 /* Remember the original type of the argument in an internal
3642 dummy second argument, since in GIMPLE pointer conversions are
3643 useless. Also mark this call as not being for automatic
3644 initialization in the internal dummy third argument. */
3645 p = CALL_EXPR_ARG (*expr_p, 0);
3646 *expr_p
3647 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3648 build_zero_cst (TREE_TYPE (p)));
3649 return GS_OK;
3651 break;
3653 default:
3656 if (fndecl && fndecl_built_in_p (fndecl))
3658 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3659 if (new_tree && new_tree != *expr_p)
3661 /* There was a transformation of this call which computes the
3662 same value, but in a more efficient way. Return and try
3663 again. */
3664 *expr_p = new_tree;
3665 return GS_OK;
3669 /* Remember the original function pointer type. */
3670 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3672 if (flag_openmp
3673 && fndecl
3674 && cfun
3675 && (cfun->curr_properties & PROP_gimple_any) == 0)
3677 tree variant = omp_resolve_declare_variant (fndecl);
3678 if (variant != fndecl)
3679 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3682 /* There is a sequence point before the call, so any side effects in
3683 the calling expression must occur before the actual call. Force
3684 gimplify_expr to use an internal post queue. */
3685 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3686 is_gimple_call_addr, fb_rvalue);
3688 nargs = call_expr_nargs (*expr_p);
3690 /* Get argument types for verification. */
3691 fndecl = get_callee_fndecl (*expr_p);
3692 parms = NULL_TREE;
3693 if (fndecl)
3694 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3695 else
3696 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3698 if (fndecl && DECL_ARGUMENTS (fndecl))
3699 p = DECL_ARGUMENTS (fndecl);
3700 else if (parms)
3701 p = parms;
3702 else
3703 p = NULL_TREE;
3704 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3707 /* If the last argument is __builtin_va_arg_pack () and it is not
3708 passed as a named argument, decrease the number of CALL_EXPR
3709 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3710 if (!p
3711 && i < nargs
3712 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3714 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3715 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3717 if (last_arg_fndecl
3718 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3720 tree call = *expr_p;
3722 --nargs;
3723 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3724 CALL_EXPR_FN (call),
3725 nargs, CALL_EXPR_ARGP (call));
3727 /* Copy all CALL_EXPR flags, location and block, except
3728 CALL_EXPR_VA_ARG_PACK flag. */
3729 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3730 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3731 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3732 = CALL_EXPR_RETURN_SLOT_OPT (call);
3733 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3734 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3736 /* Set CALL_EXPR_VA_ARG_PACK. */
3737 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
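/* An illustrative sketch: in a forwarder such as

     extern inline __attribute__((always_inline, gnu_inline))
     int mylog (const char *fmt, ...)
     {
       return fprintf (logfile, fmt, __builtin_va_arg_pack ());
     }

   the trailing __builtin_va_arg_pack () argument is dropped here and
   CALL_EXPR_VA_ARG_PACK is set instead, so that inlining into a
   caller can substitute the caller's actual varargs.  MYLOG and
   LOGFILE are just placeholder names.  */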
3741 /* If the call returns twice then after building the CFG the call
3742 argument computations will no longer dominate the call because
3743 we add an abnormal incoming edge to the call. So do not use SSA
3744 vars there. */
3745 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3747 /* Gimplify the function arguments. */
3748 if (nargs > 0)
3750 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3751 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3752 PUSH_ARGS_REVERSED ? i-- : i++)
3754 enum gimplify_status t;
3756 /* Avoid gimplifying the second argument to va_start, which needs to
3757 be the plain PARM_DECL. */
3758 if ((i != 1) || !builtin_va_start_p)
3760 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3761 EXPR_LOCATION (*expr_p), ! returns_twice);
3763 if (t == GS_ERROR)
3764 ret = GS_ERROR;
3769 /* Gimplify the static chain. */
3770 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3772 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3773 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3774 else
3776 enum gimplify_status t;
3777 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3778 EXPR_LOCATION (*expr_p), ! returns_twice);
3779 if (t == GS_ERROR)
3780 ret = GS_ERROR;
3784 /* Verify the function result. */
3785 if (want_value && fndecl
3786 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3788 error_at (loc, "using result of function returning %<void%>");
3789 ret = GS_ERROR;
3792 /* Try this again in case gimplification exposed something. */
3793 if (ret != GS_ERROR)
3795 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3797 if (new_tree && new_tree != *expr_p)
3799 /* There was a transformation of this call which computes the
3800 same value, but in a more efficient way. Return and try
3801 again. */
3802 *expr_p = new_tree;
3803 return GS_OK;
3806 else
3808 *expr_p = error_mark_node;
3809 return GS_ERROR;
3812 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS
3813 on the CALL_EXPR itself. This allows us to eliminate redundant or
3814 useless calls to "const" functions. */
3815 if (TREE_CODE (*expr_p) == CALL_EXPR)
3817 int flags = call_expr_flags (*expr_p);
3818 if (flags & (ECF_CONST | ECF_PURE)
3819 /* An infinite loop is considered a side effect. */
3820 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3821 TREE_SIDE_EFFECTS (*expr_p) = 0;
3824 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3825 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3826 form and delegate the creation of a GIMPLE_CALL to
3827 gimplify_modify_expr. This is always possible because when
3828 WANT_VALUE is true, the caller wants the result of this call into
3829 a temporary, which means that we will emit an INIT_EXPR in
3830 internal_get_tmp_var which will then be handled by
3831 gimplify_modify_expr. */
3832 if (!want_value)
3834 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3835 have to do is replicate it as a GIMPLE_CALL tuple. */
3836 gimple_stmt_iterator gsi;
3837 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3838 notice_special_calls (call);
3839 gimplify_seq_add_stmt (pre_p, call);
3840 gsi = gsi_last (*pre_p);
3841 maybe_fold_stmt (&gsi);
3842 *expr_p = NULL_TREE;
3844 else
3845 /* Remember the original function type. */
3846 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3847 CALL_EXPR_FN (*expr_p));
3849 return ret;
3852 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3853 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3855 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3856 condition is true or false, respectively. If null, we should generate
3857 our own to skip over the evaluation of this specific expression.
3859 LOCUS is the source location of the COND_EXPR.
3861 This function is the tree equivalent of do_jump.
3863 shortcut_cond_r should only be called by shortcut_cond_expr. */
3865 static tree
3866 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3867 location_t locus)
3869 tree local_label = NULL_TREE;
3870 tree t, expr = NULL;
3872 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3873 retain the shortcut semantics. Just insert the gotos here;
3874 shortcut_cond_expr will append the real blocks later. */
3875 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3877 location_t new_locus;
3879 /* Turn if (a && b) into
3881 if (a); else goto no;
3882 if (b) goto yes; else goto no;
3883 (no:) */
3885 if (false_label_p == NULL)
3886 false_label_p = &local_label;
3888 /* Keep the original source location on the first 'if'. */
3889 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3890 append_to_statement_list (t, &expr);
3892 /* Set the source location of the && on the second 'if'. */
3893 new_locus = rexpr_location (pred, locus);
3894 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3895 new_locus);
3896 append_to_statement_list (t, &expr);
3898 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3900 location_t new_locus;
3902 /* Turn if (a || b) into
3904 if (a) goto yes;
3905 if (b) goto yes; else goto no;
3906 (yes:) */
3908 if (true_label_p == NULL)
3909 true_label_p = &local_label;
3911 /* Keep the original source location on the first 'if'. */
3912 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3913 append_to_statement_list (t, &expr);
3915 /* Set the source location of the || on the second 'if'. */
3916 new_locus = rexpr_location (pred, locus);
3917 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3918 new_locus);
3919 append_to_statement_list (t, &expr);
3921 else if (TREE_CODE (pred) == COND_EXPR
3922 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3923 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3925 location_t new_locus;
3927 /* As long as we're messing with gotos, turn if (a ? b : c) into
3928 if (a)
3929 if (b) goto yes; else goto no;
3930 else
3931 if (c) goto yes; else goto no;
3933 Don't do this if one of the arms has void type, which can happen
3934 in C++ when the arm is a throw-expression. */
3936 /* Keep the original source location on the first 'if'. Set the source
3937 location of the ? on the second 'if'. */
3938 new_locus = rexpr_location (pred, locus);
3939 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3940 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3941 false_label_p, locus),
3942 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3943 false_label_p, new_locus));
3945 else
3947 expr = build3 (COND_EXPR, void_type_node, pred,
3948 build_and_jump (true_label_p),
3949 build_and_jump (false_label_p));
3950 SET_EXPR_LOCATION (expr, locus);
3953 if (local_label)
3955 t = build1 (LABEL_EXPR, void_type_node, local_label);
3956 append_to_statement_list (t, &expr);
3959 return expr;
3962 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3963 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3964 statement, if it is the last one. Otherwise, return NULL. */
3966 static tree
3967 find_goto (tree expr)
3969 if (!expr)
3970 return NULL_TREE;
3972 if (TREE_CODE (expr) == GOTO_EXPR)
3973 return expr;
3975 if (TREE_CODE (expr) != STATEMENT_LIST)
3976 return NULL_TREE;
3978 tree_stmt_iterator i = tsi_start (expr);
3980 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3981 tsi_next (&i);
3983 if (!tsi_one_before_end_p (i))
3984 return NULL_TREE;
3986 return find_goto (tsi_stmt (i));
3989 /* Same as find_goto, except that it returns NULL if the destination
3990 is not a LABEL_DECL. */
3992 static inline tree
3993 find_goto_label (tree expr)
3995 tree dest = find_goto (expr);
3996 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3997 return dest;
3998 return NULL_TREE;
4001 /* Given a conditional expression EXPR with short-circuit boolean
4002 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4003 predicate apart into the equivalent sequence of conditionals. */
4005 static tree
4006 shortcut_cond_expr (tree expr)
4008 tree pred = TREE_OPERAND (expr, 0);
4009 tree then_ = TREE_OPERAND (expr, 1);
4010 tree else_ = TREE_OPERAND (expr, 2);
4011 tree true_label, false_label, end_label, t;
4012 tree *true_label_p;
4013 tree *false_label_p;
4014 bool emit_end, emit_false, jump_over_else;
4015 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4016 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4018 /* First do simple transformations. */
4019 if (!else_se)
4021 /* If there is no 'else', turn
4022 if (a && b) then c
4023 into
4024 if (a) if (b) then c. */
4025 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4027 /* Keep the original source location on the first 'if'. */
4028 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4029 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4030 /* Set the source location of the && on the second 'if'. */
4031 if (rexpr_has_location (pred))
4032 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4033 then_ = shortcut_cond_expr (expr);
4034 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4035 pred = TREE_OPERAND (pred, 0);
4036 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4037 SET_EXPR_LOCATION (expr, locus);
4041 if (!then_se)
4043 /* If there is no 'then', turn
4044 if (a || b); else d
4045 into
4046 if (a); else if (b); else d. */
4047 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4049 /* Keep the original source location on the first 'if'. */
4050 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4051 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4052 /* Set the source location of the || on the second 'if'. */
4053 if (rexpr_has_location (pred))
4054 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4055 else_ = shortcut_cond_expr (expr);
4056 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4057 pred = TREE_OPERAND (pred, 0);
4058 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4059 SET_EXPR_LOCATION (expr, locus);
4063 /* If we're done, great. */
4064 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4065 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4066 return expr;
4068 /* Otherwise we need to mess with gotos. Change
4069 if (a) c; else d;
4071 if (a); else goto no;
4072 c; goto end;
4073 no: d; end:
4074 and recursively gimplify the condition. */
4076 true_label = false_label = end_label = NULL_TREE;
4078 /* If our arms just jump somewhere, hijack those labels so we don't
4079 generate jumps to jumps. */
4081 if (tree then_goto = find_goto_label (then_))
4083 true_label = GOTO_DESTINATION (then_goto);
4084 then_ = NULL;
4085 then_se = false;
4088 if (tree else_goto = find_goto_label (else_))
4090 false_label = GOTO_DESTINATION (else_goto);
4091 else_ = NULL;
4092 else_se = false;
4095 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4096 if (true_label)
4097 true_label_p = &true_label;
4098 else
4099 true_label_p = NULL;
4101 /* The 'else' branch also needs a label if it contains interesting code. */
4102 if (false_label || else_se)
4103 false_label_p = &false_label;
4104 else
4105 false_label_p = NULL;
4107 /* If there was nothing else in our arms, just forward the label(s). */
4108 if (!then_se && !else_se)
4109 return shortcut_cond_r (pred, true_label_p, false_label_p,
4110 EXPR_LOC_OR_LOC (expr, input_location));
4112 /* If our last subexpression already has a terminal label, reuse it. */
4113 if (else_se)
4114 t = expr_last (else_);
4115 else if (then_se)
4116 t = expr_last (then_);
4117 else
4118 t = NULL;
4119 if (t && TREE_CODE (t) == LABEL_EXPR)
4120 end_label = LABEL_EXPR_LABEL (t);
4122 /* If we don't care about jumping to the 'else' branch, jump to the end
4123 if the condition is false. */
4124 if (!false_label_p)
4125 false_label_p = &end_label;
4127 /* We only want to emit these labels if we aren't hijacking them. */
4128 emit_end = (end_label == NULL_TREE);
4129 emit_false = (false_label == NULL_TREE);
4131 /* We only emit the jump over the else clause if we have to--if the
4132 then clause may fall through. Otherwise we can wind up with a
4133 useless jump and a useless label at the end of gimplified code,
4134 which will cause us to think that this conditional as a whole
4135 falls through even if it doesn't. If we then inline a function
4136 which ends with such a condition, that can cause us to issue an
4137 inappropriate warning about control reaching the end of a
4138 non-void function. */
4139 jump_over_else = block_may_fallthru (then_);
4141 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4142 EXPR_LOC_OR_LOC (expr, input_location));
4144 expr = NULL;
4145 append_to_statement_list (pred, &expr);
4147 append_to_statement_list (then_, &expr);
4148 if (else_se)
4150 if (jump_over_else)
4152 tree last = expr_last (expr);
4153 t = build_and_jump (&end_label);
4154 if (rexpr_has_location (last))
4155 SET_EXPR_LOCATION (t, rexpr_location (last));
4156 append_to_statement_list (t, &expr);
4158 if (emit_false)
4160 t = build1 (LABEL_EXPR, void_type_node, false_label);
4161 append_to_statement_list (t, &expr);
4163 append_to_statement_list (else_, &expr);
4165 if (emit_end && end_label)
4167 t = build1 (LABEL_EXPR, void_type_node, end_label);
4168 append_to_statement_list (t, &expr);
4171 return expr;
4174 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4176 tree
4177 gimple_boolify (tree expr)
4179 tree type = TREE_TYPE (expr);
4180 location_t loc = EXPR_LOCATION (expr);
4182 if (TREE_CODE (expr) == NE_EXPR
4183 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4184 && integer_zerop (TREE_OPERAND (expr, 1)))
4186 tree call = TREE_OPERAND (expr, 0);
4187 tree fn = get_callee_fndecl (call);
4189 /* For __builtin_expect ((long) (x), y) recurse into x as well
4190 if x is truth_value_p. */
4191 if (fn
4192 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4193 && call_expr_nargs (call) == 2)
4195 tree arg = CALL_EXPR_ARG (call, 0);
4196 if (arg)
4198 if (TREE_CODE (arg) == NOP_EXPR
4199 && TREE_TYPE (arg) == TREE_TYPE (call))
4200 arg = TREE_OPERAND (arg, 0);
4201 if (truth_value_p (TREE_CODE (arg)))
4203 arg = gimple_boolify (arg);
4204 CALL_EXPR_ARG (call, 0)
4205 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4211 switch (TREE_CODE (expr))
4213 case TRUTH_AND_EXPR:
4214 case TRUTH_OR_EXPR:
4215 case TRUTH_XOR_EXPR:
4216 case TRUTH_ANDIF_EXPR:
4217 case TRUTH_ORIF_EXPR:
4218 /* Also boolify the arguments of truth exprs. */
4219 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4220 /* FALLTHRU */
4222 case TRUTH_NOT_EXPR:
4223 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4225 /* These expressions always produce boolean results. */
4226 if (TREE_CODE (type) != BOOLEAN_TYPE)
4227 TREE_TYPE (expr) = boolean_type_node;
4228 return expr;
4230 case ANNOTATE_EXPR:
4231 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4233 case annot_expr_ivdep_kind:
4234 case annot_expr_unroll_kind:
4235 case annot_expr_no_vector_kind:
4236 case annot_expr_vector_kind:
4237 case annot_expr_parallel_kind:
4238 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4239 if (TREE_CODE (type) != BOOLEAN_TYPE)
4240 TREE_TYPE (expr) = boolean_type_node;
4241 return expr;
4242 default:
4243 gcc_unreachable ();
4246 default:
4247 if (COMPARISON_CLASS_P (expr))
4249 /* These expressions always produce boolean results. */
4250 if (TREE_CODE (type) != BOOLEAN_TYPE)
4251 TREE_TYPE (expr) = boolean_type_node;
4252 return expr;
4254 /* Other expressions that get here must have boolean values, but
4255 might need to be converted to the appropriate mode. */
4256 if (TREE_CODE (type) == BOOLEAN_TYPE)
4257 return expr;
4258 return fold_convert_loc (loc, boolean_type_node, expr);
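 /* An illustrative sketch (hypothetical variables): boolifying the C
    condition "x && (y | z)" retypes the TRUTH_ANDIF_EXPR to BOOLEAN_TYPE
    and recurses into both operands; "y | z" is not a truth expression,
    so it falls through to the fold_convert_loc call above and is
    converted to boolean_type_node.  */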
4262 /* Given a conditional expression *EXPR_P without side effects, gimplify
4263 its operands. New statements are inserted to PRE_P. */
4265 static enum gimplify_status
4266 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4268 tree expr = *expr_p, cond;
4269 enum gimplify_status ret, tret;
4270 enum tree_code code;
4272 cond = gimple_boolify (COND_EXPR_COND (expr));
4274 /* We need to handle && and || specially, as their gimplification
4275 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
4276 code = TREE_CODE (cond);
4277 if (code == TRUTH_ANDIF_EXPR)
4278 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4279 else if (code == TRUTH_ORIF_EXPR)
4280 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4281 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4282 COND_EXPR_COND (*expr_p) = cond;
4284 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4285 is_gimple_val, fb_rvalue);
4286 ret = MIN (ret, tret);
4287 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4288 is_gimple_val, fb_rvalue);
4290 return MIN (ret, tret);
4293 /* Return true if evaluating EXPR could trap.
4294 EXPR is GENERIC, while tree_could_trap_p can be called
4295 only on GIMPLE. */
4297 bool
4298 generic_expr_could_trap_p (tree expr)
4300 unsigned i, n;
4302 if (!expr || is_gimple_val (expr))
4303 return false;
4305 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4306 return true;
4308 n = TREE_OPERAND_LENGTH (expr);
4309 for (i = 0; i < n; i++)
4310 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4311 return true;
4313 return false;
4316 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4317 into
4319 if (p) if (p)
4320 t1 = a; a;
4321 else or else
4322 t1 = b; b;
4325 The second form is used when *EXPR_P is of type void.
4327 PRE_P points to the list where side effects that must happen before
4328 *EXPR_P should be stored. */
4330 static enum gimplify_status
4331 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4333 tree expr = *expr_p;
4334 tree type = TREE_TYPE (expr);
4335 location_t loc = EXPR_LOCATION (expr);
4336 tree tmp, arm1, arm2;
4337 enum gimplify_status ret;
4338 tree label_true, label_false, label_cont;
4339 bool have_then_clause_p, have_else_clause_p;
4340 gcond *cond_stmt;
4341 enum tree_code pred_code;
4342 gimple_seq seq = NULL;
4344 /* If this COND_EXPR has a value, copy the values into a temporary within
4345 the arms. */
4346 if (!VOID_TYPE_P (type))
4348 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4349 tree result;
4351 /* If either an rvalue is ok or we do not require an lvalue, create the
4352 temporary. But we cannot do that if the type is addressable. */
4353 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4354 && !TREE_ADDRESSABLE (type))
4356 if (gimplify_ctxp->allow_rhs_cond_expr
4357 /* If either branch has side effects or could trap, it can't be
4358 evaluated unconditionally. */
4359 && !TREE_SIDE_EFFECTS (then_)
4360 && !generic_expr_could_trap_p (then_)
4361 && !TREE_SIDE_EFFECTS (else_)
4362 && !generic_expr_could_trap_p (else_))
4363 return gimplify_pure_cond_expr (expr_p, pre_p);
4365 tmp = create_tmp_var (type, "iftmp");
4366 result = tmp;
4369 /* Otherwise, only create and copy references to the values. */
4370 else
4372 type = build_pointer_type (type);
4374 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4375 then_ = build_fold_addr_expr_loc (loc, then_);
4377 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4378 else_ = build_fold_addr_expr_loc (loc, else_);
4380 expr
4381 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4383 tmp = create_tmp_var (type, "iftmp");
4384 result = build_simple_mem_ref_loc (loc, tmp);
4387 /* Build the new then clause, `tmp = then_;'. But don't build the
4388 assignment if the value is void; in C++ it can be if it's a throw. */
4389 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4390 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4392 /* Similarly, build the new else clause, `tmp = else_;'. */
4393 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4394 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4396 TREE_TYPE (expr) = void_type_node;
4397 recalculate_side_effects (expr);
4399 /* Move the COND_EXPR to the prequeue. */
4400 gimplify_stmt (&expr, pre_p);
4402 *expr_p = result;
4403 return GS_ALL_DONE;
4406 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4407 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4408 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4409 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4411 /* Make sure the condition has BOOLEAN_TYPE. */
4412 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4414 /* Break apart && and || conditions. */
4415 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4416 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4418 expr = shortcut_cond_expr (expr);
4420 if (expr != *expr_p)
4422 *expr_p = expr;
4424 /* We can't rely on gimplify_expr to re-gimplify the expanded
4425 form properly, as cleanups might cause the target labels to be
4426 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4427 set up a conditional context. */
4428 gimple_push_condition ();
4429 gimplify_stmt (expr_p, &seq);
4430 gimple_pop_condition (pre_p);
4431 gimple_seq_add_seq (pre_p, seq);
4433 return GS_ALL_DONE;
4437 /* Now do the normal gimplification. */
4439 /* Gimplify condition. */
4440 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4441 is_gimple_condexpr_for_cond, fb_rvalue);
4442 if (ret == GS_ERROR)
4443 return GS_ERROR;
4444 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4446 gimple_push_condition ();
4448 have_then_clause_p = have_else_clause_p = false;
4449 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4450 if (label_true
4451 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4452 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4453 have different locations, otherwise we end up with incorrect
4454 location information on the branches. */
4455 && (optimize
4456 || !EXPR_HAS_LOCATION (expr)
4457 || !rexpr_has_location (label_true)
4458 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4460 have_then_clause_p = true;
4461 label_true = GOTO_DESTINATION (label_true);
4463 else
4464 label_true = create_artificial_label (UNKNOWN_LOCATION);
4465 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4466 if (label_false
4467 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4468 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4469 have different locations, otherwise we end up with incorrect
4470 location information on the branches. */
4471 && (optimize
4472 || !EXPR_HAS_LOCATION (expr)
4473 || !rexpr_has_location (label_false)
4474 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4476 have_else_clause_p = true;
4477 label_false = GOTO_DESTINATION (label_false);
4479 else
4480 label_false = create_artificial_label (UNKNOWN_LOCATION);
4482 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4483 &arm2);
4484 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4485 label_false);
4486 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4487 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4488 gimplify_seq_add_stmt (&seq, cond_stmt);
4489 gimple_stmt_iterator gsi = gsi_last (seq);
4490 maybe_fold_stmt (&gsi);
4492 label_cont = NULL_TREE;
4493 if (!have_then_clause_p)
4495 /* For if (...) {} else { code; } put label_true after
4496 the else block. */
4497 if (TREE_OPERAND (expr, 1) == NULL_TREE
4498 && !have_else_clause_p
4499 && TREE_OPERAND (expr, 2) != NULL_TREE)
4501 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4502 handling that label_cont == label_true can only be reached
4503 through fallthrough from { code; }. */
4504 if (integer_zerop (COND_EXPR_COND (expr)))
4505 UNUSED_LABEL_P (label_true) = 1;
4506 label_cont = label_true;
4508 else
4510 bool then_side_effects
4511 = (TREE_OPERAND (expr, 1)
4512 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4513 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4514 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4515 /* For if (...) { code; } else {} or
4516 if (...) { code; } else goto label; or
4517 if (...) { code; return; } else { ... }
4518 label_cont isn't needed. */
4519 if (!have_else_clause_p
4520 && TREE_OPERAND (expr, 2) != NULL_TREE
4521 && gimple_seq_may_fallthru (seq))
4523 gimple *g;
4524 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4526 /* For if (0) { non-side-effect-code } else { code }
4527 tell -Wimplicit-fallthrough handling that label_cont can
4528 only be reached through fallthrough from { code }. */
4529 if (integer_zerop (COND_EXPR_COND (expr)))
4531 UNUSED_LABEL_P (label_true) = 1;
4532 if (!then_side_effects)
4533 UNUSED_LABEL_P (label_cont) = 1;
4536 g = gimple_build_goto (label_cont);
4538 /* GIMPLE_COND's are very low level; they have embedded
4539 gotos. This particular embedded goto should not be marked
4540 with the location of the original COND_EXPR, as it would
4541 correspond to the COND_EXPR's condition, not the ELSE or the
4542 THEN arms. To avoid marking it with the wrong location, flag
4543 it as "no location". */
4544 gimple_set_do_not_emit_location (g);
4546 gimplify_seq_add_stmt (&seq, g);
4550 if (!have_else_clause_p)
4552 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4553 tell -Wimplicit-fallthrough handling that label_false can only be
4554 reached through fallthrough from { code }. */
4555 if (integer_nonzerop (COND_EXPR_COND (expr))
4556 && (TREE_OPERAND (expr, 2) == NULL_TREE
4557 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4558 UNUSED_LABEL_P (label_false) = 1;
4559 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4560 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4562 if (label_cont)
4563 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4565 gimple_pop_condition (pre_p);
4566 gimple_seq_add_seq (pre_p, seq);
4568 if (ret == GS_ERROR)
4569 ; /* Do nothing. */
4570 else if (have_then_clause_p || have_else_clause_p)
4571 ret = GS_ALL_DONE;
4572 else
4574 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4575 expr = TREE_OPERAND (expr, 0);
4576 gimplify_stmt (&expr, pre_p);
4579 *expr_p = NULL;
4580 return ret;
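 /* A sketch of the addressable/lvalue path above (hypothetical types and
    names): when the value of "p ? a : b" of addressable type T is
    required, the arms cannot be copied into a T temporary, so the
    COND_EXPR is rewritten along the lines of

      T *iftmp;
      if (p) iftmp = &a; else iftmp = &b;

    and the result is the dereference "*iftmp" built by
    build_simple_mem_ref_loc.  */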
4583 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4584 to be marked addressable.
4586 We cannot rely on such an expression being directly markable if a temporary
4587 has been created by the gimplification. In this case, we create another
4588 temporary and initialize it with a copy, which will become a store after we
4589 mark it addressable. This can happen if the front-end passed us something
4590 that it could not mark addressable yet, like a Fortran pass-by-reference
4591 parameter (int) floatvar. */
4593 static void
4594 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4596 while (handled_component_p (*expr_p))
4597 expr_p = &TREE_OPERAND (*expr_p, 0);
4599 /* Do not allow an SSA name as the temporary. */
4600 if (is_gimple_reg (*expr_p))
4601 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4604 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4605 a call to __builtin_memcpy. */
4607 static enum gimplify_status
4608 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4609 gimple_seq *seq_p)
4611 tree t, to, to_ptr, from, from_ptr;
4612 gcall *gs;
4613 location_t loc = EXPR_LOCATION (*expr_p);
4615 to = TREE_OPERAND (*expr_p, 0);
4616 from = TREE_OPERAND (*expr_p, 1);
4618 /* Mark the RHS addressable. Beware that it may not be possible to do so
4619 directly if a temporary has been created by the gimplification. */
4620 prepare_gimple_addressable (&from, seq_p);
4622 mark_addressable (from);
4623 from_ptr = build_fold_addr_expr_loc (loc, from);
4624 gimplify_arg (&from_ptr, seq_p, loc);
4626 mark_addressable (to);
4627 to_ptr = build_fold_addr_expr_loc (loc, to);
4628 gimplify_arg (&to_ptr, seq_p, loc);
4630 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4632 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4633 gimple_call_set_alloca_for_var (gs, true);
4635 if (want_value)
4637 /* tmp = memcpy() */
4638 t = create_tmp_var (TREE_TYPE (to_ptr));
4639 gimple_call_set_lhs (gs, t);
4640 gimplify_seq_add_stmt (seq_p, gs);
4642 *expr_p = build_simple_mem_ref (t);
4643 return GS_ALL_DONE;
4646 gimplify_seq_add_stmt (seq_p, gs);
4647 *expr_p = NULL;
4648 return GS_ALL_DONE;
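 /* A sketch of the effect (hypothetical declarations): a large aggregate
    copy such as

      struct blob { char data[1024]; } dst, src;
      dst = src;

    becomes roughly "__builtin_memcpy (&dst, &src, 1024);"; when
    WANT_VALUE is set, the destination pointer returned by the call is
    dereferenced to yield the value of the assignment.  */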
4651 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4652 a call to __builtin_memset. In this case we know that the RHS is
4653 a CONSTRUCTOR with an empty element list. */
4655 static enum gimplify_status
4656 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4657 gimple_seq *seq_p)
4659 tree t, from, to, to_ptr;
4660 gcall *gs;
4661 location_t loc = EXPR_LOCATION (*expr_p);
4663 /* Assert our assumptions, to abort instead of producing wrong code
4664 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4665 not be immediately exposed. */
4666 from = TREE_OPERAND (*expr_p, 1);
4667 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4668 from = TREE_OPERAND (from, 0);
4670 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4671 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4673 /* Now proceed. */
4674 to = TREE_OPERAND (*expr_p, 0);
4676 to_ptr = build_fold_addr_expr_loc (loc, to);
4677 gimplify_arg (&to_ptr, seq_p, loc);
4678 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4680 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4682 if (want_value)
4684 /* tmp = memset() */
4685 t = create_tmp_var (TREE_TYPE (to_ptr));
4686 gimple_call_set_lhs (gs, t);
4687 gimplify_seq_add_stmt (seq_p, gs);
4689 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4690 return GS_ALL_DONE;
4693 gimplify_seq_add_stmt (seq_p, gs);
4694 *expr_p = NULL;
4695 return GS_ALL_DONE;
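 /* Likewise a sketch for this path (hypothetical declaration): an
    empty-CONSTRUCTOR assignment like "dst = (struct blob) {};" can be
    lowered to roughly "__builtin_memset (&dst, 0, 1024);", with the same
    WANT_VALUE handling as the memcpy case above.  */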
4698 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4699 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4700 assignment. Return non-null if we detect a potential overlap. */
4702 struct gimplify_init_ctor_preeval_data
4704 /* The base decl of the lhs object. May be NULL, in which case we
4705 have to assume the lhs is indirect. */
4706 tree lhs_base_decl;
4708 /* The alias set of the lhs object. */
4709 alias_set_type lhs_alias_set;
4712 static tree
4713 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4715 struct gimplify_init_ctor_preeval_data *data
4716 = (struct gimplify_init_ctor_preeval_data *) xdata;
4717 tree t = *tp;
4719 /* If we find the base object, obviously we have overlap. */
4720 if (data->lhs_base_decl == t)
4721 return t;
4723 /* If the constructor component is indirect, determine if we have a
4724 potential overlap with the lhs. The only bits of information we
4725 have to go on at this point are addressability and alias sets. */
4726 if ((INDIRECT_REF_P (t)
4727 || TREE_CODE (t) == MEM_REF)
4728 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4729 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4730 return t;
4732 /* If the constructor component is a call, determine if it can hide a
4733 potential overlap with the lhs through an INDIRECT_REF like above.
4734 ??? Ugh - this is completely broken. In fact this whole analysis
4735 doesn't look conservative. */
4736 if (TREE_CODE (t) == CALL_EXPR)
4738 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4740 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4741 if (POINTER_TYPE_P (TREE_VALUE (type))
4742 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4743 && alias_sets_conflict_p (data->lhs_alias_set,
4744 get_alias_set
4745 (TREE_TYPE (TREE_VALUE (type)))))
4746 return t;
4749 if (IS_TYPE_OR_DECL_P (t))
4750 *walk_subtrees = 0;
4751 return NULL;
4754 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4755 force values that overlap with the lhs (as described by *DATA)
4756 into temporaries. */
4758 static void
4759 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4760 struct gimplify_init_ctor_preeval_data *data)
4762 enum gimplify_status one;
4764 /* If the value is constant, then there's nothing to pre-evaluate. */
4765 if (TREE_CONSTANT (*expr_p))
4767 /* Ensure it does not have side effects; it might contain a reference to
4768 the object we're initializing. */
4769 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4770 return;
4773 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4774 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4775 return;
4777 /* Recurse for nested constructors. */
4778 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4780 unsigned HOST_WIDE_INT ix;
4781 constructor_elt *ce;
4782 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4784 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4785 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4787 return;
4790 /* If this is a variable sized type, we must remember the size. */
4791 maybe_with_size_expr (expr_p);
4793 /* Gimplify the constructor element to something appropriate for the rhs
4794 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4795 the gimplifier will consider this a store to memory. Doing this
4796 gimplification now means that we won't have to deal with complicated
4797 language-specific trees, nor trees like SAVE_EXPR that can induce
4798 exponential search behavior. */
4799 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4800 if (one == GS_ERROR)
4802 *expr_p = NULL;
4803 return;
4806 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4807 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4808 always be true for all scalars, since is_gimple_mem_rhs insists on a
4809 temporary variable for them. */
4810 if (DECL_P (*expr_p))
4811 return;
4813 /* If this is of variable size, we have no choice but to assume it doesn't
4814 overlap since we can't make a temporary for it. */
4815 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4816 return;
4818 /* Otherwise, we must search for overlap ... */
4819 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4820 return;
4822 /* ... and if found, force the value into a temporary. */
4823 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4826 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4827 a RANGE_EXPR in a CONSTRUCTOR for an array.
4829 var = lower;
4830 loop_entry:
4831 object[var] = value;
4832 if (var == upper)
4833 goto loop_exit;
4834 var = var + 1;
4835 goto loop_entry;
4836 loop_exit:
4838 We increment var _after_ the loop exit check because we might otherwise
4839 fail if upper == TYPE_MAX_VALUE (type for upper).
4841 Note that we never have to deal with SAVE_EXPRs here, because this has
4842 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4844 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4845 gimple_seq *, bool);
4847 static void
4848 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4849 tree value, tree array_elt_type,
4850 gimple_seq *pre_p, bool cleared)
4852 tree loop_entry_label, loop_exit_label, fall_thru_label;
4853 tree var, var_type, cref, tmp;
4855 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4856 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4857 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4859 /* Create and initialize the index variable. */
4860 var_type = TREE_TYPE (upper);
4861 var = create_tmp_var (var_type);
4862 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4864 /* Add the loop entry label. */
4865 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4867 /* Build the reference. */
4868 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4869 var, NULL_TREE, NULL_TREE);
4871 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4872 the store. Otherwise just assign value to the reference. */
4874 if (TREE_CODE (value) == CONSTRUCTOR)
4875 /* NB we might have to call ourselves recursively through
4876 gimplify_init_ctor_eval if the value is a constructor. */
4877 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4878 pre_p, cleared);
4879 else
4881 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4882 != GS_ERROR)
4883 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4886 /* We exit the loop when the index var is equal to the upper bound. */
4887 gimplify_seq_add_stmt (pre_p,
4888 gimple_build_cond (EQ_EXPR, var, upper,
4889 loop_exit_label, fall_thru_label));
4891 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4893 /* Otherwise, increment the index var... */
4894 tmp = build2 (PLUS_EXPR, var_type, var,
4895 fold_convert (var_type, integer_one_node));
4896 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4898 /* ...and jump back to the loop entry. */
4899 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4901 /* Add the loop exit label. */
4902 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
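 /* RANGE_EXPRs typically come from GNU C designated-initializer ranges,
    e.g. the hypothetical declaration

      int a[100] = { [10 ... 89] = 5 };

    which this function lowers to the index-variable loop shown above
    rather than emitting eighty separate stores.  */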
4905 /* A subroutine of gimplify_init_constructor. Generate individual
4906 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4907 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4908 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4909 zeroed first. */
4911 static void
4912 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4913 gimple_seq *pre_p, bool cleared)
4915 tree array_elt_type = NULL;
4916 unsigned HOST_WIDE_INT ix;
4917 tree purpose, value;
4919 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4920 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4922 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4924 tree cref;
4926 /* NULL values are created above for gimplification errors. */
4927 if (value == NULL)
4928 continue;
4930 if (cleared && initializer_zerop (value))
4931 continue;
4933 /* ??? Here's to hoping the front end fills in all of the indices,
4934 so we don't have to figure out what's missing ourselves. */
4935 gcc_assert (purpose);
4937 /* Skip zero-sized fields, unless value has side-effects. This can
4938 happen with calls to functions returning an empty type, which
4939 we shouldn't discard. As a number of downstream passes don't
4940 expect sets of empty type fields, we rely on the gimplification of
4941 the MODIFY_EXPR we make below to drop the assignment statement. */
4942 if (!TREE_SIDE_EFFECTS (value)
4943 && TREE_CODE (purpose) == FIELD_DECL
4944 && is_empty_type (TREE_TYPE (purpose)))
4945 continue;
4947 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4948 whole range. */
4949 if (TREE_CODE (purpose) == RANGE_EXPR)
4951 tree lower = TREE_OPERAND (purpose, 0);
4952 tree upper = TREE_OPERAND (purpose, 1);
4954 /* If the lower bound is equal to upper, just treat it as if
4955 upper was the index. */
4956 if (simple_cst_equal (lower, upper))
4957 purpose = upper;
4958 else
4960 gimplify_init_ctor_eval_range (object, lower, upper, value,
4961 array_elt_type, pre_p, cleared);
4962 continue;
4966 if (array_elt_type)
4968 /* Do not use bitsizetype for ARRAY_REF indices. */
4969 if (TYPE_DOMAIN (TREE_TYPE (object)))
4970 purpose
4971 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4972 purpose);
4973 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4974 purpose, NULL_TREE, NULL_TREE);
4976 else
4978 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4979 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4980 unshare_expr (object), purpose, NULL_TREE);
4983 if (TREE_CODE (value) == CONSTRUCTOR
4984 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4985 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4986 pre_p, cleared);
4987 else
4989 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4990 gimplify_and_add (init, pre_p);
4991 ggc_free (init);
4996 /* Return the appropriate RHS predicate for this LHS. */
4998 gimple_predicate
4999 rhs_predicate_for (tree lhs)
5001 if (is_gimple_reg (lhs))
5002 return is_gimple_reg_rhs_or_call;
5003 else
5004 return is_gimple_mem_rhs_or_call;
5007 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5008 before the LHS has been gimplified. */
5010 static gimple_predicate
5011 initial_rhs_predicate_for (tree lhs)
5013 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5014 return is_gimple_reg_rhs_or_call;
5015 else
5016 return is_gimple_mem_rhs_or_call;
5019 /* Gimplify a C99 compound literal expression. This just means adding
5020 the DECL_EXPR before the current statement and using its anonymous
5021 decl instead. */
5023 static enum gimplify_status
5024 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5025 bool (*gimple_test_f) (tree),
5026 fallback_t fallback)
5028 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5029 tree decl = DECL_EXPR_DECL (decl_s);
5030 tree init = DECL_INITIAL (decl);
5031 /* Mark the decl as addressable if the compound literal
5032 expression is addressable now; otherwise it would be marked too late,
5033 after we gimplify the initialization expression. */
5034 if (TREE_ADDRESSABLE (*expr_p))
5035 TREE_ADDRESSABLE (decl) = 1;
5036 /* Otherwise, if we don't need an lvalue and have a literal,
5037 substitute it directly. Check if it matches the gimple predicate, as
5038 otherwise we'd generate a new temporary, and we can as well just
5039 use the decl we already have. */
5040 else if (!TREE_ADDRESSABLE (decl)
5041 && !TREE_THIS_VOLATILE (decl)
5042 && init
5043 && (fallback & fb_lvalue) == 0
5044 && gimple_test_f (init))
5046 *expr_p = init;
5047 return GS_OK;
5050 /* If the decl is not addressable, then it is being used in some
5051 expression or on the right hand side of a statement, and it can
5052 be put into a readonly data section. */
5053 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5054 TREE_READONLY (decl) = 1;
5056 /* This decl isn't mentioned in the enclosing block, so add it to the
5057 list of temps. FIXME it seems a bit of a kludge to say that
5058 anonymous artificial vars aren't pushed, but everything else is. */
5059 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5060 gimple_add_tmp_var (decl);
5062 gimplify_and_add (decl_s, pre_p);
5063 *expr_p = decl;
5064 return GS_OK;
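 /* An illustrative C99 fragment (hypothetical): gimplifying

      int *p = (int []) { 1, 2, 3 };

    adds the DECL_EXPR for the anonymous array before the statement and
    rewrites the COMPOUND_LITERAL_EXPR into a use of that decl; when the
    literal is not addressable and no lvalue is needed, the decl may also
    be marked TREE_READONLY as described above.  */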
5067 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5068 return a new CONSTRUCTOR if something changed. */
5070 static tree
5071 optimize_compound_literals_in_ctor (tree orig_ctor)
5073 tree ctor = orig_ctor;
5074 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5075 unsigned int idx, num = vec_safe_length (elts);
5077 for (idx = 0; idx < num; idx++)
5079 tree value = (*elts)[idx].value;
5080 tree newval = value;
5081 if (TREE_CODE (value) == CONSTRUCTOR)
5082 newval = optimize_compound_literals_in_ctor (value);
5083 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5085 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5086 tree decl = DECL_EXPR_DECL (decl_s);
5087 tree init = DECL_INITIAL (decl);
5089 if (!TREE_ADDRESSABLE (value)
5090 && !TREE_ADDRESSABLE (decl)
5091 && init
5092 && TREE_CODE (init) == CONSTRUCTOR)
5093 newval = optimize_compound_literals_in_ctor (init);
5095 if (newval == value)
5096 continue;
5098 if (ctor == orig_ctor)
5100 ctor = copy_node (orig_ctor);
5101 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5102 elts = CONSTRUCTOR_ELTS (ctor);
5104 (*elts)[idx].value = newval;
5106 return ctor;
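 /* A sketch of the rewrite (hypothetical types): an initializer such as

      struct pair { int x, y; };
      struct outer { struct pair p; } o = { (struct pair) { 1, 2 } };

    has its nested COMPOUND_LITERAL_EXPR replaced by the literal's
    CONSTRUCTOR initializer, yielding the equivalent of "{ { 1, 2 } }".  */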
5109 /* A subroutine of gimplify_modify_expr. Break out elements of a
5110 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5112 Note that we still need to clear any elements that don't have explicit
5113 initializers, so if not all elements are initialized we keep the
5114 original MODIFY_EXPR; we just remove all of the constructor elements.
5116 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5117 GS_ERROR if we would have to create a temporary when gimplifying
5118 this constructor. Otherwise, return GS_OK.
5120 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5122 static enum gimplify_status
5123 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5124 bool want_value, bool notify_temp_creation)
5126 tree object, ctor, type;
5127 enum gimplify_status ret;
5128 vec<constructor_elt, va_gc> *elts;
5129 bool cleared = false;
5130 bool is_empty_ctor = false;
5131 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5133 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5135 if (!notify_temp_creation)
5137 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5138 is_gimple_lvalue, fb_lvalue);
5139 if (ret == GS_ERROR)
5140 return ret;
5143 object = TREE_OPERAND (*expr_p, 0);
5144 ctor = TREE_OPERAND (*expr_p, 1)
5145 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5146 type = TREE_TYPE (ctor);
5147 elts = CONSTRUCTOR_ELTS (ctor);
5148 ret = GS_ALL_DONE;
5150 switch (TREE_CODE (type))
5152 case RECORD_TYPE:
5153 case UNION_TYPE:
5154 case QUAL_UNION_TYPE:
5155 case ARRAY_TYPE:
5157 /* Use readonly data for initializers of this or smaller size
5158 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5159 ratio. */
5160 const HOST_WIDE_INT min_unique_size = 64;
5161 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5162 is smaller than this, use readonly data. */
5163 const int unique_nonzero_ratio = 8;
5164 /* True if a single access of the object must be ensured. This is the
5165 case if the target is volatile, the type is non-addressable and more
5166 than one field needs to be assigned. */
5167 const bool ensure_single_access
5168 = TREE_THIS_VOLATILE (object)
5169 && !TREE_ADDRESSABLE (type)
5170 && vec_safe_length (elts) > 1;
5171 struct gimplify_init_ctor_preeval_data preeval_data;
5172 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5173 HOST_WIDE_INT num_unique_nonzero_elements;
5174 bool complete_p, valid_const_initializer;
5176 /* Aggregate types must lower constructors to initialization of
5177 individual elements. The exception is that a CONSTRUCTOR node
5178 with no elements indicates zero-initialization of the whole. */
5179 if (vec_safe_is_empty (elts))
5181 if (notify_temp_creation)
5182 return GS_OK;
5184 /* The var will be initialized and so appear on lhs of
5185 assignment, it can't be TREE_READONLY anymore. */
5186 if (VAR_P (object))
5187 TREE_READONLY (object) = 0;
5189 is_empty_ctor = true;
5190 break;
5193 /* Fetch information about the constructor to direct later processing.
5194 We might want to make static versions of it in various cases, and
5195 can only do so if it is known to be a valid constant initializer. */
5196 valid_const_initializer
5197 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5198 &num_unique_nonzero_elements,
5199 &num_ctor_elements, &complete_p);
5201 /* If a const aggregate variable is being initialized, then it
5202 should never be a loss to promote the variable to be static. */
5203 if (valid_const_initializer
5204 && num_nonzero_elements > 1
5205 && TREE_READONLY (object)
5206 && VAR_P (object)
5207 && !DECL_REGISTER (object)
5208 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
5209 /* For ctors that have many repeated nonzero elements
5210 represented through RANGE_EXPRs, prefer initializing
5211 those through runtime loops over copies of large amounts
5212 of data from readonly data section. */
5213 && (num_unique_nonzero_elements
5214 > num_nonzero_elements / unique_nonzero_ratio
5215 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5216 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5218 if (notify_temp_creation)
5219 return GS_ERROR;
5221 DECL_INITIAL (object) = ctor;
5222 TREE_STATIC (object) = 1;
5223 if (!DECL_NAME (object))
5224 DECL_NAME (object) = create_tmp_var_name ("C");
5225 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5227 /* ??? C++ doesn't automatically append a .<number> to the
5228 assembler name, and even when it does, it looks at FE private
5229 data structures to figure out what that number should be,
5230 which are not set for this variable. I suppose this is
5231 important for local statics for inline functions, which aren't
5232 "local" in the object file sense. So in order to get a unique
5233 TU-local symbol, we must invoke the lhd version now. */
5234 lhd_set_decl_assembler_name (object);
5236 *expr_p = NULL_TREE;
5237 break;
5240 /* The var will be initialized and so appear on lhs of
5241 assignment, it can't be TREE_READONLY anymore. */
5242 if (VAR_P (object) && !notify_temp_creation)
5243 TREE_READONLY (object) = 0;
5245 /* If there are "lots" of initialized elements, even discounting
5246 those that are not address constants (and thus *must* be
5247 computed at runtime), then partition the constructor into
5248 constant and non-constant parts. Block copy the constant
5249 parts in, then generate code for the non-constant parts. */
5250 /* TODO. There's code in cp/typeck.cc to do this. */
5252 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5253 /* store_constructor will ignore the clearing of variable-sized
5254 objects. Initializers for such objects must explicitly set
5255 every field that needs to be set. */
5256 cleared = false;
5257 else if (!complete_p)
5258 /* If the constructor isn't complete, clear the whole object
5259 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5261 ??? This ought not to be needed. For any element not present
5262 in the initializer, we should simply set it to zero. Except
5263 we'd need to *find* the elements that are not present, and that
5264 requires trickery to avoid quadratic compile-time behavior in
5265 large cases or excessive memory use in small cases. */
5266 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5267 else if (num_ctor_elements - num_nonzero_elements
5268 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5269 && num_nonzero_elements < num_ctor_elements / 4)
5270 /* If there are "lots" of zeros, it's more efficient to clear
5271 the memory and then set the nonzero elements. */
5272 cleared = true;
5273 else if (ensure_single_access && num_nonzero_elements == 0)
5274 /* If a single access to the target must be ensured and all elements
5275 are zero, then it's optimal to clear whatever their number. */
5276 cleared = true;
5277 else
5278 cleared = false;
5280 /* If there are "lots" of initialized elements, and all of them
5281 are valid address constants, then the entire initializer can
5282 be dropped to memory, and then memcpy'd out. Don't do this
5283 for sparse arrays, though, as it's more efficient to follow
5284 the standard CONSTRUCTOR behavior of memset followed by
5285 individual element initialization. Also don't do this for small
5286 all-zero initializers (which aren't big enough to merit
5287 clearing), and don't try to make bitwise copies of
5288 TREE_ADDRESSABLE types. */
5289 if (valid_const_initializer
5290 && complete_p
5291 && !(cleared || num_nonzero_elements == 0)
5292 && !TREE_ADDRESSABLE (type))
5294 HOST_WIDE_INT size = int_size_in_bytes (type);
5295 unsigned int align;
5297 /* ??? We can still get unbounded array types, at least
5298 from the C++ front end. This seems wrong, but attempt
5299 to work around it for now. */
5300 if (size < 0)
5302 size = int_size_in_bytes (TREE_TYPE (object));
5303 if (size >= 0)
5304 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5307 /* Find the maximum alignment we can assume for the object. */
5308 /* ??? Make use of DECL_OFFSET_ALIGN. */
5309 if (DECL_P (object))
5310 align = DECL_ALIGN (object);
5311 else
5312 align = TYPE_ALIGN (type);
5314 /* Do a block move either if the size is so small as to make
5315 each individual move a sub-unit move on average, or if it
5316 is so large as to make individual moves inefficient. */
5317 if (size > 0
5318 && num_nonzero_elements > 1
5319 /* For ctors that have many repeated nonzero elements
5320 represented through RANGE_EXPRs, prefer initializing
5321 those through runtime loops over copies of large amounts
5322 of data from readonly data section. */
5323 && (num_unique_nonzero_elements
5324 > num_nonzero_elements / unique_nonzero_ratio
5325 || size <= min_unique_size)
5326 && (size < num_nonzero_elements
5327 || !can_move_by_pieces (size, align)))
5329 if (notify_temp_creation)
5330 return GS_ERROR;
5332 walk_tree (&ctor, force_labels_r, NULL, NULL);
5333 ctor = tree_output_constant_def (ctor);
5334 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5335 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5336 TREE_OPERAND (*expr_p, 1) = ctor;
5338 /* This is no longer an assignment of a CONSTRUCTOR, but
5339 we still may have processing to do on the LHS. So
5340 pretend we didn't do anything here to let that happen. */
5341 return GS_UNHANDLED;
5345 /* If a single access to the target must be ensured and there are
5346 nonzero elements or the zero elements are not assigned en masse,
5347 initialize the target from a temporary. */
5348 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5350 if (notify_temp_creation)
5351 return GS_ERROR;
5353 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5354 TREE_OPERAND (*expr_p, 0) = temp;
5355 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5356 *expr_p,
5357 build2 (MODIFY_EXPR, void_type_node,
5358 object, temp));
5359 return GS_OK;
5362 if (notify_temp_creation)
5363 return GS_OK;
5365 /* If there are nonzero elements and if needed, pre-evaluate to capture
5366 elements overlapping with the lhs into temporaries. We must do this
5367 before clearing to fetch the values before they are zeroed-out. */
5368 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5370 preeval_data.lhs_base_decl = get_base_address (object);
5371 if (!DECL_P (preeval_data.lhs_base_decl))
5372 preeval_data.lhs_base_decl = NULL;
5373 preeval_data.lhs_alias_set = get_alias_set (object);
5375 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5376 pre_p, post_p, &preeval_data);
5379 bool ctor_has_side_effects_p
5380 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5382 if (cleared)
5384 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5385 Note that we still have to gimplify, in order to handle the
5386 case of variable sized types. Avoid shared tree structures. */
5387 CONSTRUCTOR_ELTS (ctor) = NULL;
5388 TREE_SIDE_EFFECTS (ctor) = 0;
5389 object = unshare_expr (object);
5390 gimplify_stmt (expr_p, pre_p);
5393 /* If we have not block cleared the object, or if there are nonzero
5394 elements in the constructor, or if the constructor has side effects,
5395 add assignments to the individual scalar fields of the object. */
5396 if (!cleared
5397 || num_nonzero_elements > 0
5398 || ctor_has_side_effects_p)
5399 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5401 *expr_p = NULL_TREE;
5403 break;
5405 case COMPLEX_TYPE:
5407 tree r, i;
5409 if (notify_temp_creation)
5410 return GS_OK;
5412 /* Extract the real and imaginary parts out of the ctor. */
5413 gcc_assert (elts->length () == 2);
5414 r = (*elts)[0].value;
5415 i = (*elts)[1].value;
5416 if (r == NULL || i == NULL)
5418 tree zero = build_zero_cst (TREE_TYPE (type));
5419 if (r == NULL)
5420 r = zero;
5421 if (i == NULL)
5422 i = zero;
5425 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5426 represent creation of a complex value. */
5427 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5429 ctor = build_complex (type, r, i);
5430 TREE_OPERAND (*expr_p, 1) = ctor;
5432 else
5434 ctor = build2 (COMPLEX_EXPR, type, r, i);
5435 TREE_OPERAND (*expr_p, 1) = ctor;
5436 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5437 pre_p,
5438 post_p,
5439 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5440 fb_rvalue);
5443 break;
5445 case VECTOR_TYPE:
5447 unsigned HOST_WIDE_INT ix;
5448 constructor_elt *ce;
5450 if (notify_temp_creation)
5451 return GS_OK;
5453 /* Vector types use CONSTRUCTOR all the way through gimple
5454 compilation as a general initializer. */
5455 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5457 enum gimplify_status tret;
5458 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5459 fb_rvalue);
5460 if (tret == GS_ERROR)
5461 ret = GS_ERROR;
5462 else if (TREE_STATIC (ctor)
5463 && !initializer_constant_valid_p (ce->value,
5464 TREE_TYPE (ce->value)))
5465 TREE_STATIC (ctor) = 0;
5467 recompute_constructor_flags (ctor);
5469 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5470 if (TREE_CONSTANT (ctor))
5472 bool constant_p = true;
5473 tree value;
5475 /* Even when ctor is constant, it might contain non-*_CST
5476 elements, such as addresses or trapping values like
5477 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5478 in VECTOR_CST nodes. */
5479 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5480 if (!CONSTANT_CLASS_P (value))
5482 constant_p = false;
5483 break;
5486 if (constant_p)
5488 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5489 break;
5493 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5494 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5496 break;
5498 default:
5499 /* So how did we get a CONSTRUCTOR for a scalar type? */
5500 gcc_unreachable ();
5503 if (ret == GS_ERROR)
5504 return GS_ERROR;
5505 /* If we have gimplified both sides of the initializer but have
5506 not emitted an assignment, do so now. */
5507 if (*expr_p
5508 /* If the type is an empty type, we don't need to emit the
5509 assignment. */
5510 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5512 tree lhs = TREE_OPERAND (*expr_p, 0);
5513 tree rhs = TREE_OPERAND (*expr_p, 1);
5514 if (want_value && object == lhs)
5515 lhs = unshare_expr (lhs);
5516 gassign *init = gimple_build_assign (lhs, rhs);
5517 gimplify_seq_add_stmt (pre_p, init);
5519 if (want_value)
5521 *expr_p = object;
5522 ret = GS_OK;
5524 else
5526 *expr_p = NULL;
5527 ret = GS_ALL_DONE;
5530 /* If the user requests to initialize automatic variables, we
5531 should also initialize the padding inside the variable. Add a call to
5532 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5533 initialize the padding of the object to zero regardless of
5534 INIT_TYPE. Note, we will not insert this call if the aggregate
5535 variable has been completely cleared already or if it's initialized
5536 with an empty constructor. We cannot insert this call if the
5537 variable is a gimple register since __builtin_clear_padding will take
5538 the address of the variable. As a result, if a long double/_Complex long
5539 double variable is spilled onto the stack later, its padding cannot
5540 be cleared with __builtin_clear_padding; we should clear its padding
5541 when it is spilled into memory. */
5542 if (is_init_expr
5543 && !is_gimple_reg (object)
5544 && clear_padding_type_may_have_padding_p (type)
5545 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5546 || !AGGREGATE_TYPE_P (type))
5547 && is_var_need_auto_init (object))
5548 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5550 return ret;
5553 /* Given a pointer value OP0, return a simplified version of an
5554 indirection through OP0, or NULL_TREE if no simplification is
5555 possible. This may only be applied to a rhs of an expression.
5556 Note that the resulting type may be different from the type pointed
5557 to in the sense that it is still compatible from the langhooks
5558 point of view. */
5560 static tree
5561 gimple_fold_indirect_ref_rhs (tree t)
5563 return gimple_fold_indirect_ref (t);
5566 /* Subroutine of gimplify_modify_expr to do simplifications of
5567 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5568 something changes. */
5570 static enum gimplify_status
5571 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5572 gimple_seq *pre_p, gimple_seq *post_p,
5573 bool want_value)
5575 enum gimplify_status ret = GS_UNHANDLED;
5576 bool changed;
5578 do
5579 {
5580 changed = false;
5581 switch (TREE_CODE (*from_p))
5583 case VAR_DECL:
5584 /* If we're assigning from a read-only variable initialized with
5585 a constructor and not volatile, do the direct assignment from
5586 the constructor, but only if the target is not volatile either
5587 since this latter assignment might end up being done on a per
5588 field basis. However, if the target is volatile and the type
5589 is aggregate and non-addressable, gimplify_init_constructor
5590 knows that it needs to ensure a single access to the target
5591 and it will return GS_OK only in this case. */
5592 if (TREE_READONLY (*from_p)
5593 && DECL_INITIAL (*from_p)
5594 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5595 && !TREE_THIS_VOLATILE (*from_p)
5596 && (!TREE_THIS_VOLATILE (*to_p)
5597 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5598 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5600 tree old_from = *from_p;
5601 enum gimplify_status subret;
5603 /* Move the constructor into the RHS. */
5604 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5606 /* Let's see if gimplify_init_constructor will need to put
5607 it in memory. */
5608 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5609 false, true);
5610 if (subret == GS_ERROR)
5612 /* If so, revert the change. */
5613 *from_p = old_from;
5615 else
5617 ret = GS_OK;
5618 changed = true;
5621 break;
5622 case INDIRECT_REF:
5623 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5624 /* If we have code like
5626 *(const A*)(A*)&x
5628 where the type of "x" is a (possibly cv-qualified) variant
5629 of "A", treat the entire expression as identical to "x".
5630 This kind of code arises in C++ when an object is bound
5631 to a const reference, and if "x" is a TARGET_EXPR we want
5632 to take advantage of the optimization below. But not if
5633 the type is TREE_ADDRESSABLE; then C++17 says that the
5634 TARGET_EXPR needs to be a temporary. */
5635 if (tree t
5636 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5638 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5639 if (TREE_THIS_VOLATILE (t) != volatile_p)
5641 if (DECL_P (t))
5642 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5643 build_fold_addr_expr (t));
5644 if (REFERENCE_CLASS_P (t))
5645 TREE_THIS_VOLATILE (t) = volatile_p;
5647 *from_p = t;
5648 ret = GS_OK;
5649 changed = true;
5651 break;
5653 case TARGET_EXPR:
5655 /* If we are initializing something from a TARGET_EXPR, strip the
5656 TARGET_EXPR and initialize it directly, if possible. This can't
5657 be done if the initializer is void, since that implies that the
5658 temporary is set in some non-trivial way.
5660 ??? What about code that pulls out the temp and uses it
5661 elsewhere? I think that such code never uses the TARGET_EXPR as
5662 an initializer. If I'm wrong, we'll die because the temp won't
5663 have any RTL. In that case, I guess we'll need to replace
5664 references somehow. */
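/* A sketch of the intended rewrite: GENERIC like
     x = TARGET_EXPR <D.1234, foo ()>
   is gimplified as if it were plain "x = foo ()", eliding the
   temporary (D.1234 is a made-up slot name for illustration). */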
5665 tree init = TARGET_EXPR_INITIAL (*from_p);
5667 if (init
5668 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5669 || !TARGET_EXPR_NO_ELIDE (*from_p))
5670 && !VOID_TYPE_P (TREE_TYPE (init)))
5672 *from_p = init;
5673 ret = GS_OK;
5674 changed = true;
5677 break;
5679 case COMPOUND_EXPR:
5680 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5681 caught. */
5682 gimplify_compound_expr (from_p, pre_p, true);
5683 ret = GS_OK;
5684 changed = true;
5685 break;
5687 case CONSTRUCTOR:
5688 /* If we already made some changes, let the front end have a
5689 crack at this before we break it down. */
5690 if (ret != GS_UNHANDLED)
5691 break;
5693 /* If we're initializing from a CONSTRUCTOR, break this into
5694 individual MODIFY_EXPRs. */
5695 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5696 false);
5697 return ret;
5699 case COND_EXPR:
5700 /* If we're assigning to a non-register type, push the assignment
5701 down into the branches. This is mandatory for ADDRESSABLE types,
5702 since we cannot generate temporaries for such, but it saves a
5703 copy in other cases as well. */
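/* Roughly: "x = c ? f () : g ()" becomes
     if (c) x = f (); else x = g ();
   with "x" unshared in the second arm, as done below. */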
5704 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5706 /* This code should mirror the code in gimplify_cond_expr. */
5707 enum tree_code code = TREE_CODE (*expr_p);
5708 tree cond = *from_p;
5709 tree result = *to_p;
5711 ret = gimplify_expr (&result, pre_p, post_p,
5712 is_gimple_lvalue, fb_lvalue);
5713 if (ret != GS_ERROR)
5714 ret = GS_OK;
5716 /* If we are going to write RESULT more than once, clear
5717 TREE_READONLY flag, otherwise we might incorrectly promote
5718 the variable to static const and initialize it at compile
5719 time in one of the branches. */
5720 if (VAR_P (result)
5721 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5722 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5723 TREE_READONLY (result) = 0;
5724 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5725 TREE_OPERAND (cond, 1)
5726 = build2 (code, void_type_node, result,
5727 TREE_OPERAND (cond, 1));
5728 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5729 TREE_OPERAND (cond, 2)
5730 = build2 (code, void_type_node, unshare_expr (result),
5731 TREE_OPERAND (cond, 2));
5733 TREE_TYPE (cond) = void_type_node;
5734 recalculate_side_effects (cond);
5736 if (want_value)
5738 gimplify_and_add (cond, pre_p);
5739 *expr_p = unshare_expr (result);
5741 else
5742 *expr_p = cond;
5743 return ret;
5745 break;
5747 case CALL_EXPR:
5748 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5749 return slot so that we don't generate a temporary. */
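/* E.g. for "struct S s = make_s ();" with S returned in memory, the
   callee can then construct its return value directly into "s"
   instead of into a temporary that is block-copied afterwards
   (a sketch; the checks below reject the unsafe cases). */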
5750 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5751 && aggregate_value_p (*from_p, *from_p))
5753 bool use_target;
5755 if (!(rhs_predicate_for (*to_p))(*from_p))
5756 /* If we need a temporary, *to_p isn't accurate. */
5757 use_target = false;
5758 /* It's OK to use the return slot directly unless it's an NRV. */
5759 else if (TREE_CODE (*to_p) == RESULT_DECL
5760 && DECL_NAME (*to_p) == NULL_TREE
5761 && needs_to_live_in_memory (*to_p))
5762 use_target = true;
5763 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5764 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5765 /* Don't force regs into memory. */
5766 use_target = false;
5767 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5768 /* It's OK to use the target directly if it's being
5769 initialized. */
5770 use_target = true;
5771 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5772 != INTEGER_CST)
5773 /* Always use the target and thus RSO for variable-sized types.
5774 GIMPLE cannot deal with a variable-sized assignment
5775 embedded in a call statement. */
5776 use_target = true;
5777 else if (TREE_CODE (*to_p) != SSA_NAME
5778 && (!is_gimple_variable (*to_p)
5779 || needs_to_live_in_memory (*to_p)))
5780 /* Don't use the original target if it's already addressable;
5781 if its address escapes, and the called function uses the
5782 NRV optimization, a conforming program could see *to_p
5783 change before the called function returns; see c++/19317.
5784 When optimizing, the return_slot pass marks more functions
5785 as safe after we have escape info. */
5786 use_target = false;
5787 else
5788 use_target = true;
5790 if (use_target)
5792 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5793 mark_addressable (*to_p);
5796 break;
5798 case WITH_SIZE_EXPR:
5799 /* Likewise for calls that return an aggregate of non-constant size,
5800 since we would not be able to generate a temporary at all. */
5801 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5803 *from_p = TREE_OPERAND (*from_p, 0);
5804 /* We don't change ret in this case because the
5805 WITH_SIZE_EXPR might have been added in
5806 gimplify_modify_expr, so returning GS_OK would lead to an
5807 infinite loop. */
5808 changed = true;
5810 break;
5812 /* If we're initializing from a container, push the initialization
5813 inside it. */
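/* E.g. a statement expression "x = ({ stmts; tmp; })" is rewritten,
   approximately, to "({ stmts; x = tmp; })" by voidify_wrapper_expr
   below, so the store happens inside the wrapper. */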
5814 case CLEANUP_POINT_EXPR:
5815 case BIND_EXPR:
5816 case STATEMENT_LIST:
5818 tree wrap = *from_p;
5819 tree t;
5821 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5822 fb_lvalue);
5823 if (ret != GS_ERROR)
5824 ret = GS_OK;
5826 t = voidify_wrapper_expr (wrap, *expr_p);
5827 gcc_assert (t == *expr_p);
5829 if (want_value)
5831 gimplify_and_add (wrap, pre_p);
5832 *expr_p = unshare_expr (*to_p);
5834 else
5835 *expr_p = wrap;
5836 return GS_OK;
5839 case NOP_EXPR:
5840 /* Pull out compound literal expressions from a NOP_EXPR.
5841 Those are created in the C FE to drop qualifiers during
5842 lvalue conversion. */
5843 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5844 && tree_ssa_useless_type_conversion (*from_p))
5846 *from_p = TREE_OPERAND (*from_p, 0);
5847 ret = GS_OK;
5848 changed = true;
5850 break;
5852 case COMPOUND_LITERAL_EXPR:
5854 tree complit = TREE_OPERAND (*expr_p, 1);
5855 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5856 tree decl = DECL_EXPR_DECL (decl_s);
5857 tree init = DECL_INITIAL (decl);
5859 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5860 into struct T x = { 0, 1, 2 } if the address of the
5861 compound literal has never been taken. */
5862 if (!TREE_ADDRESSABLE (complit)
5863 && !TREE_ADDRESSABLE (decl)
5864 && init)
5866 *expr_p = copy_node (*expr_p);
5867 TREE_OPERAND (*expr_p, 1) = init;
5868 return GS_OK;
5872 default:
5873 break;
5876 while (changed);
5878 return ret;
5882 /* Return true if T looks like a valid GIMPLE statement. */
5884 static bool
5885 is_gimple_stmt (tree t)
5887 const enum tree_code code = TREE_CODE (t);
5889 switch (code)
5891 case NOP_EXPR:
5892 /* The only valid NOP_EXPR is the empty statement. */
5893 return IS_EMPTY_STMT (t);
5895 case BIND_EXPR:
5896 case COND_EXPR:
5897 /* These are only valid if they're void. */
5898 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5900 case SWITCH_EXPR:
5901 case GOTO_EXPR:
5902 case RETURN_EXPR:
5903 case LABEL_EXPR:
5904 case CASE_LABEL_EXPR:
5905 case TRY_CATCH_EXPR:
5906 case TRY_FINALLY_EXPR:
5907 case EH_FILTER_EXPR:
5908 case CATCH_EXPR:
5909 case ASM_EXPR:
5910 case STATEMENT_LIST:
5911 case OACC_PARALLEL:
5912 case OACC_KERNELS:
5913 case OACC_SERIAL:
5914 case OACC_DATA:
5915 case OACC_HOST_DATA:
5916 case OACC_DECLARE:
5917 case OACC_UPDATE:
5918 case OACC_ENTER_DATA:
5919 case OACC_EXIT_DATA:
5920 case OACC_CACHE:
5921 case OMP_PARALLEL:
5922 case OMP_FOR:
5923 case OMP_SIMD:
5924 case OMP_DISTRIBUTE:
5925 case OMP_LOOP:
5926 case OACC_LOOP:
5927 case OMP_SCAN:
5928 case OMP_SCOPE:
5929 case OMP_SECTIONS:
5930 case OMP_SECTION:
5931 case OMP_SINGLE:
5932 case OMP_MASTER:
5933 case OMP_MASKED:
5934 case OMP_TASKGROUP:
5935 case OMP_ORDERED:
5936 case OMP_CRITICAL:
5937 case OMP_TASK:
5938 case OMP_TARGET:
5939 case OMP_TARGET_DATA:
5940 case OMP_TARGET_UPDATE:
5941 case OMP_TARGET_ENTER_DATA:
5942 case OMP_TARGET_EXIT_DATA:
5943 case OMP_TASKLOOP:
5944 case OMP_TEAMS:
5945 /* These are always void. */
5946 return true;
5948 case CALL_EXPR:
5949 case MODIFY_EXPR:
5950 case PREDICT_EXPR:
5951 /* These are valid regardless of their type. */
5952 return true;
5954 default:
5955 return false;
5960 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5961 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5963 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5964 other, unmodified part of the complex object just before the total store.
5965 As a consequence, if the object is still uninitialized, an undefined value
5966 will be loaded into a register, which may result in a spurious exception
5967 if the register is floating-point and the value happens to be a signaling
5968 NaN for example. Then the fully-fledged complex operations lowering pass
5969 followed by a DCE pass are necessary in order to fix things up. */
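/* For instance, "__real__ c = x" becomes, approximately,
     tmp = __imag__ c;
     c = COMPLEX_EXPR <x, tmp>;
   so all of "c" is stored at once and "c" can remain a register. */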
5971 static enum gimplify_status
5972 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5973 bool want_value)
5975 enum tree_code code, ocode;
5976 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5978 lhs = TREE_OPERAND (*expr_p, 0);
5979 rhs = TREE_OPERAND (*expr_p, 1);
5980 code = TREE_CODE (lhs);
5981 lhs = TREE_OPERAND (lhs, 0);
5983 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5984 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5985 suppress_warning (other);
5986 other = get_formal_tmp_var (other, pre_p);
5988 realpart = code == REALPART_EXPR ? rhs : other;
5989 imagpart = code == REALPART_EXPR ? other : rhs;
5991 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5992 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5993 else
5994 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5996 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5997 *expr_p = (want_value) ? rhs : NULL_TREE;
5999 return GS_ALL_DONE;
6002 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6004 modify_expr
6005 : varname '=' rhs
6006 | '*' ID '=' rhs
6008 PRE_P points to the list where side effects that must happen before
6009 *EXPR_P should be stored.
6011 POST_P points to the list where side effects that must happen after
6012 *EXPR_P should be stored.
6014 WANT_VALUE is nonzero iff we want to use the value of this expression
6015 in another expression. */
6017 static enum gimplify_status
6018 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6019 bool want_value)
6021 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6022 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6023 enum gimplify_status ret = GS_UNHANDLED;
6024 gimple *assign;
6025 location_t loc = EXPR_LOCATION (*expr_p);
6026 gimple_stmt_iterator gsi;
6028 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6029 || TREE_CODE (*expr_p) == INIT_EXPR);
6031 /* Trying to simplify a clobber using normal logic doesn't work,
6032 so handle it here. */
6033 if (TREE_CLOBBER_P (*from_p))
6035 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6036 if (ret == GS_ERROR)
6037 return ret;
6038 gcc_assert (!want_value);
6039 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6041 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6042 pre_p, post_p);
6043 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6045 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6046 *expr_p = NULL;
6047 return GS_ALL_DONE;
6050 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6051 memset. */
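/* E.g. an object of variable-sized type initialized from the empty
   constructor {} becomes, roughly, memset (&object, 0, size); see
   gimplify_modify_expr_to_memset for the exact form. */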
6052 if (TREE_TYPE (*from_p) != error_mark_node
6053 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6054 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6055 && TREE_CODE (*from_p) == CONSTRUCTOR
6056 && CONSTRUCTOR_NELTS (*from_p) == 0)
6058 maybe_with_size_expr (from_p);
6059 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6060 return gimplify_modify_expr_to_memset (expr_p,
6061 TREE_OPERAND (*from_p, 1),
6062 want_value, pre_p);
6065 /* Insert pointer conversions required by the middle-end that are not
6066 required by the frontend. This fixes middle-end type checking in
6067 cases like gcc.dg/redecl-6.c. */
6068 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6070 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6071 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6072 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6075 /* See if any simplifications can be done based on what the RHS is. */
6076 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6077 want_value);
6078 if (ret != GS_UNHANDLED)
6079 return ret;
6081 /* For empty types only gimplify the left hand side and right hand
6082 side as statements and throw away the assignment. Do this after
6083 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6084 types properly. */
6085 if (is_empty_type (TREE_TYPE (*from_p))
6086 && !want_value
6087 /* Don't do this for calls that return addressable types, expand_call
6088 relies on those having a lhs. */
6089 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6090 && TREE_CODE (*from_p) == CALL_EXPR))
6092 gimplify_stmt (from_p, pre_p);
6093 gimplify_stmt (to_p, pre_p);
6094 *expr_p = NULL_TREE;
6095 return GS_ALL_DONE;
6098 /* If the value being copied is of variable width, compute the length
6099 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6100 before gimplifying any of the operands so that we can resolve any
6101 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6102 the size of the expression to be copied, not of the destination, so
6103 that is what we must do here. */
6104 maybe_with_size_expr (from_p);
6106 /* As a special case, we have to temporarily allow for assignments
6107 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6108 a toplevel statement, when gimplifying the GENERIC expression
6109 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6110 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6112 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6113 prevent gimplify_expr from trying to create a new temporary for
6114 foo's LHS, we tell it that it should only gimplify until it
6115 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6116 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6117 and all we need to do here is set 'a' to be its LHS. */
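/* So for GENERIC "a = foo (b)" the pre-queue ends up holding the
   single tuple a = foo (b) rather than "tmp = foo (b); a = tmp;"
   (approximately). */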
6119 /* Gimplify the RHS first for C++17 and bug 71104. */
6120 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6121 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6122 if (ret == GS_ERROR)
6123 return ret;
6125 /* Then gimplify the LHS. */
6126 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6127 twice we have to make sure to gimplify into non-SSA as otherwise
6128 the abnormal edge added later will make those defs not dominate
6129 their uses.
6130 ??? Technically this applies only to the registers used in the
6131 resulting non-register *TO_P. */
6132 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6133 if (saved_into_ssa
6134 && TREE_CODE (*from_p) == CALL_EXPR
6135 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6136 gimplify_ctxp->into_ssa = false;
6137 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6138 gimplify_ctxp->into_ssa = saved_into_ssa;
6139 if (ret == GS_ERROR)
6140 return ret;
6142 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6143 guess for the predicate was wrong. */
6144 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6145 if (final_pred != initial_pred)
6147 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6148 if (ret == GS_ERROR)
6149 return ret;
6152 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6153 size as argument to the call. */
6154 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6156 tree call = TREE_OPERAND (*from_p, 0);
6157 tree vlasize = TREE_OPERAND (*from_p, 1);
6159 if (TREE_CODE (call) == CALL_EXPR
6160 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6162 int nargs = call_expr_nargs (call);
6163 tree type = TREE_TYPE (call);
6164 tree ap = CALL_EXPR_ARG (call, 0);
6165 tree tag = CALL_EXPR_ARG (call, 1);
6166 tree aptag = CALL_EXPR_ARG (call, 2);
6167 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6168 IFN_VA_ARG, type,
6169 nargs + 1, ap, tag,
6170 aptag, vlasize);
6171 TREE_OPERAND (*from_p, 0) = newcall;
6175 /* Now see if the above changed *from_p to something we handle specially. */
6176 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6177 want_value);
6178 if (ret != GS_UNHANDLED)
6179 return ret;
6181 /* If we've got a variable sized assignment between two lvalues (i.e. does
6182 not involve a call), then we can make things a bit more straightforward
6183 by converting the assignment to memcpy or memset. */
6184 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6186 tree from = TREE_OPERAND (*from_p, 0);
6187 tree size = TREE_OPERAND (*from_p, 1);
6189 if (TREE_CODE (from) == CONSTRUCTOR)
6190 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6192 if (is_gimple_addressable (from))
6194 *from_p = from;
6195 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6196 pre_p);
6200 /* Transform partial stores to non-addressable complex variables into
6201 total stores. This allows us to use real instead of virtual operands
6202 for these variables, which improves optimization. */
6203 if ((TREE_CODE (*to_p) == REALPART_EXPR
6204 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6205 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6206 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6208 /* Try to alleviate the effects of the gimplification creating artificial
6209 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6210 make sure not to create DECL_DEBUG_EXPR links across functions. */
6211 if (!gimplify_ctxp->into_ssa
6212 && VAR_P (*from_p)
6213 && DECL_IGNORED_P (*from_p)
6214 && DECL_P (*to_p)
6215 && !DECL_IGNORED_P (*to_p)
6216 && decl_function_context (*to_p) == current_function_decl
6217 && decl_function_context (*from_p) == current_function_decl)
6219 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6220 DECL_NAME (*from_p)
6221 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6222 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6223 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6226 if (want_value && TREE_THIS_VOLATILE (*to_p))
6227 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6229 if (TREE_CODE (*from_p) == CALL_EXPR)
6231 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6232 instead of a GIMPLE_ASSIGN. */
6233 gcall *call_stmt;
6234 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6236 /* Gimplify internal functions created in the FEs. */
6237 int nargs = call_expr_nargs (*from_p), i;
6238 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6239 auto_vec<tree> vargs (nargs);
6241 for (i = 0; i < nargs; i++)
6243 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6244 EXPR_LOCATION (*from_p));
6245 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6247 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6248 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6249 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6251 else
6253 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6254 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6255 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6256 tree fndecl = get_callee_fndecl (*from_p);
6257 if (fndecl
6258 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6259 && call_expr_nargs (*from_p) == 3)
6260 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6261 CALL_EXPR_ARG (*from_p, 0),
6262 CALL_EXPR_ARG (*from_p, 1),
6263 CALL_EXPR_ARG (*from_p, 2));
6264 else
6266 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6269 notice_special_calls (call_stmt);
6270 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6271 gimple_call_set_lhs (call_stmt, *to_p);
6272 else if (TREE_CODE (*to_p) == SSA_NAME)
6273 /* The above is somewhat premature, avoid ICEing later for a
6274 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6275 ??? This doesn't make it a default-def. */
6276 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6278 assign = call_stmt;
6280 else
6282 assign = gimple_build_assign (*to_p, *from_p);
6283 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6284 if (COMPARISON_CLASS_P (*from_p))
6285 copy_warning (assign, *from_p);
6288 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6290 /* We should have got an SSA name from the start. */
6291 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6292 || ! gimple_in_ssa_p (cfun));
6295 gimplify_seq_add_stmt (pre_p, assign);
6296 gsi = gsi_last (*pre_p);
6297 maybe_fold_stmt (&gsi);
6299 if (want_value)
6301 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6302 return GS_OK;
6304 else
6305 *expr_p = NULL;
6307 return GS_ALL_DONE;
6310 /* Gimplify a comparison between two variable-sized objects. Do this
6311 with a call to BUILT_IN_MEMCMP. */
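/* A sketch: for variable-sized X and Y, "X == Y" becomes
     __builtin_memcmp (&X, &Y, size-of-X) == 0
   with the size taken from the type of the first operand. */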
6313 static enum gimplify_status
6314 gimplify_variable_sized_compare (tree *expr_p)
6316 location_t loc = EXPR_LOCATION (*expr_p);
6317 tree op0 = TREE_OPERAND (*expr_p, 0);
6318 tree op1 = TREE_OPERAND (*expr_p, 1);
6319 tree t, arg, dest, src, expr;
6321 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6322 arg = unshare_expr (arg);
6323 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6324 src = build_fold_addr_expr_loc (loc, op1);
6325 dest = build_fold_addr_expr_loc (loc, op0);
6326 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6327 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6329 expr
6330 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6331 SET_EXPR_LOCATION (expr, loc);
6332 *expr_p = expr;
6334 return GS_OK;
6337 /* Gimplify a comparison between two aggregate objects of integral scalar
6338 mode as a comparison between the bitwise equivalent scalar values. */
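/* Roughly: if X and Y are 4-byte structures whose type has an
   integral scalar mode, "X == Y" is rewritten as
     VIEW_CONVERT_EXPR<int>(X) == VIEW_CONVERT_EXPR<int>(Y)
   where the scalar type is whatever type_for_mode returns. */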
6340 static enum gimplify_status
6341 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6343 location_t loc = EXPR_LOCATION (*expr_p);
6344 tree op0 = TREE_OPERAND (*expr_p, 0);
6345 tree op1 = TREE_OPERAND (*expr_p, 1);
6347 tree type = TREE_TYPE (op0);
6348 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6350 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6351 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6353 *expr_p
6354 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6356 return GS_OK;
6359 /* Gimplify an expression sequence. This function gimplifies each
6360 expression and rewrites the original expression with the last
6361 expression of the sequence in GIMPLE form.
6363 PRE_P points to the list where the side effects for all the
6364 expressions in the sequence will be emitted.
6366 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
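/* E.g. "(a = 1, b = 2, c)" emits "a = 1" and "b = 2" into *PRE_P and
   leaves "c" in *EXPR_P as the value of the whole expression
   (approximately). */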
6368 static enum gimplify_status
6369 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6371 tree t = *expr_p;
6373 do
6374 {
6375 tree *sub_p = &TREE_OPERAND (t, 0);
6377 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6378 gimplify_compound_expr (sub_p, pre_p, false);
6379 else
6380 gimplify_stmt (sub_p, pre_p);
6382 t = TREE_OPERAND (t, 1);
6384 while (TREE_CODE (t) == COMPOUND_EXPR);
6386 *expr_p = t;
6387 if (want_value)
6388 return GS_OK;
6389 else
6391 gimplify_stmt (expr_p, pre_p);
6392 return GS_ALL_DONE;
6396 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6397 gimplify. After gimplification, EXPR_P will point to a new temporary
6398 that holds the original value of the SAVE_EXPR node.
6400 PRE_P points to the list where side effects that must happen before
6401 *EXPR_P should be stored. */
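/* E.g. a SAVE_EXPR <i++> is evaluated once into a temporary,
     tmp = i++;
   and every later occurrence of that SAVE_EXPR reuses "tmp"
   (a sketch). */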
6403 static enum gimplify_status
6404 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6406 enum gimplify_status ret = GS_ALL_DONE;
6407 tree val;
6409 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6410 val = TREE_OPERAND (*expr_p, 0);
6412 if (TREE_TYPE (val) == error_mark_node)
6413 return GS_ERROR;
6415 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6416 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6418 /* The operand may be a void-valued expression. It is
6419 being executed only for its side-effects. */
6420 if (TREE_TYPE (val) == void_type_node)
6422 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6423 is_gimple_stmt, fb_none);
6424 val = NULL;
6426 else
6427 /* The temporary may not be an SSA name as later abnormal and EH
6428 control flow may invalidate use/def domination. When in SSA
6429 form then assume there are no such issues and SAVE_EXPRs only
6430 appear via GENERIC foldings. */
6431 val = get_initialized_tmp_var (val, pre_p, post_p,
6432 gimple_in_ssa_p (cfun));
6434 TREE_OPERAND (*expr_p, 0) = val;
6435 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6438 *expr_p = val;
6440 return ret;
6443 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6445 unary_expr
6446 : ...
6447 | '&' varname
6450 PRE_P points to the list where side effects that must happen before
6451 *EXPR_P should be stored.
6453 POST_P points to the list where side effects that must happen after
6454 *EXPR_P should be stored. */
6456 static enum gimplify_status
6457 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6459 tree expr = *expr_p;
6460 tree op0 = TREE_OPERAND (expr, 0);
6461 enum gimplify_status ret;
6462 location_t loc = EXPR_LOCATION (*expr_p);
6464 switch (TREE_CODE (op0))
6466 case INDIRECT_REF:
6467 do_indirect_ref:
6468 /* Check if we are dealing with an expression of the form '&*ptr'.
6469 While the front end folds away '&*ptr' into 'ptr', these
6470 expressions may be generated internally by the compiler (e.g.,
6471 builtins like __builtin_va_end). */
6472 /* Caution: the silent array decomposition semantics we allow for
6473 ADDR_EXPR means we can't always discard the pair. */
6474 /* Gimplification of the ADDR_EXPR operand may drop
6475 cv-qualification conversions, so make sure we add them if
6476 needed. */
6478 tree op00 = TREE_OPERAND (op0, 0);
6479 tree t_expr = TREE_TYPE (expr);
6480 tree t_op00 = TREE_TYPE (op00);
6482 if (!useless_type_conversion_p (t_expr, t_op00))
6483 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6484 *expr_p = op00;
6485 ret = GS_OK;
6487 break;
6489 case VIEW_CONVERT_EXPR:
6490 /* Take the address of our operand and then convert it to the type of
6491 this ADDR_EXPR.
6493 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6494 all clear. The impact of this transformation is even less clear. */
6496 /* If the operand is a useless conversion, look through it. Doing so
6497 guarantees that the ADDR_EXPR and its operand will remain of the
6498 same type. */
6499 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6500 op0 = TREE_OPERAND (op0, 0);
6502 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6503 build_fold_addr_expr_loc (loc,
6504 TREE_OPERAND (op0, 0)));
6505 ret = GS_OK;
6506 break;
6508 case MEM_REF:
6509 if (integer_zerop (TREE_OPERAND (op0, 1)))
6510 goto do_indirect_ref;
6512 /* fall through */
6514 default:
6515 /* If we see a call to a declared builtin or see its address
6516 being taken (we can unify those cases here) then we can mark
6517 the builtin for implicit generation by GCC. */
6518 if (TREE_CODE (op0) == FUNCTION_DECL
6519 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6520 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6521 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6523 /* We use fb_either here because the C frontend sometimes takes
6524 the address of a call that returns a struct; see
6525 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6526 the implied temporary explicit. */
6528 /* Make the operand addressable. */
6529 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6530 is_gimple_addressable, fb_either);
6531 if (ret == GS_ERROR)
6532 break;
6534 /* Then mark it. Beware that it may not be possible to do so directly
6535 if a temporary has been created by the gimplification. */
6536 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6538 op0 = TREE_OPERAND (expr, 0);
6540 /* For various reasons, the gimplification of the expression
6541 may have made a new INDIRECT_REF. */
6542 if (TREE_CODE (op0) == INDIRECT_REF
6543 || (TREE_CODE (op0) == MEM_REF
6544 && integer_zerop (TREE_OPERAND (op0, 1))))
6545 goto do_indirect_ref;
6547 mark_addressable (TREE_OPERAND (expr, 0));
6549 /* The FEs may end up building ADDR_EXPRs early on a decl with
6550 an incomplete type. Re-build ADDR_EXPRs in canonical form
6551 here. */
6552 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6553 *expr_p = build_fold_addr_expr (op0);
6555 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6556 recompute_tree_invariant_for_addr_expr (*expr_p);
6558 /* If we re-built the ADDR_EXPR add a conversion to the original type
6559 if required. */
6560 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6561 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6563 break;
6566 return ret;
6569 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6570 value; output operands should be a gimple lvalue. */
6572 static enum gimplify_status
6573 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6575 tree expr;
6576 int noutputs;
6577 const char **oconstraints;
6578 int i;
6579 tree link;
6580 const char *constraint;
6581 bool allows_mem, allows_reg, is_inout;
6582 enum gimplify_status ret, tret;
6583 gasm *stmt;
6584 vec<tree, va_gc> *inputs;
6585 vec<tree, va_gc> *outputs;
6586 vec<tree, va_gc> *clobbers;
6587 vec<tree, va_gc> *labels;
6588 tree link_next;
6590 expr = *expr_p;
6591 noutputs = list_length (ASM_OUTPUTS (expr));
6592 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6594 inputs = NULL;
6595 outputs = NULL;
6596 clobbers = NULL;
6597 labels = NULL;
6599 ret = GS_ALL_DONE;
6600 link_next = NULL_TREE;
6601 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6603 bool ok;
6604 size_t constraint_len;
6606 link_next = TREE_CHAIN (link);
6608 oconstraints[i]
6609 = constraint
6610 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6611 constraint_len = strlen (constraint);
6612 if (constraint_len == 0)
6613 continue;
6615 ok = parse_output_constraint (&constraint, i, 0, 0,
6616 &allows_mem, &allows_reg, &is_inout);
6617 if (!ok)
6619 ret = GS_ERROR;
6620 is_inout = false;
6623 /* If we can't make copies, we can only accept memory.
6624 Similarly for VLAs. */
6625 tree outtype = TREE_TYPE (TREE_VALUE (link));
6626 if (outtype != error_mark_node
6627 && (TREE_ADDRESSABLE (outtype)
6628 || !COMPLETE_TYPE_P (outtype)
6629 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6631 if (allows_mem)
6632 allows_reg = 0;
6633 else
6635 error ("impossible constraint in %<asm%>");
6636 error ("non-memory output %d must stay in memory", i);
6637 return GS_ERROR;
6641 if (!allows_reg && allows_mem)
6642 mark_addressable (TREE_VALUE (link));
6644 tree orig = TREE_VALUE (link);
6645 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6646 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6647 fb_lvalue | fb_mayfail);
6648 if (tret == GS_ERROR)
6650 if (orig != error_mark_node)
6651 error ("invalid lvalue in %<asm%> output %d", i);
6652 ret = tret;
6655 /* If the constraint does not allow memory, make sure we gimplify
6656 the operand to a register if it is not one already but its base is.
6657 This happens for complex and vector components. */
6658 if (!allows_mem)
6660 tree op = TREE_VALUE (link);
6661 if (! is_gimple_val (op)
6662 && is_gimple_reg_type (TREE_TYPE (op))
6663 && is_gimple_reg (get_base_address (op)))
6665 tree tem = create_tmp_reg (TREE_TYPE (op));
6666 tree ass;
6667 if (is_inout)
6669 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6670 tem, unshare_expr (op));
6671 gimplify_and_add (ass, pre_p);
6673 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6674 gimplify_and_add (ass, post_p);
6676 TREE_VALUE (link) = tem;
6677 tret = GS_OK;
6681 vec_safe_push (outputs, link);
6682 TREE_CHAIN (link) = NULL_TREE;
6684 if (is_inout)
6686 /* An input/output operand. To give the optimizers more
6687 flexibility, split it into separate input and output
6688 operands. */
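/* E.g. an "+r" (x) operand becomes the output "=r" (x) plus a
   matching input "0" (x) that refers back to it (a sketch). */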
6689 tree input;
6690 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6691 char buf[11];
6693 /* Turn the in/out constraint into an output constraint. */
6694 char *p = xstrdup (constraint);
6695 p[0] = '=';
6696 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6698 /* And add a matching input constraint. */
6699 if (allows_reg)
6701 sprintf (buf, "%u", i);
6703 /* If there are multiple alternatives in the constraint,
6704 handle each of them individually. Those that allow a register
6705 will be replaced with the operand number; the others will stay
6706 unchanged. */
6707 if (strchr (p, ',') != NULL)
6709 size_t len = 0, buflen = strlen (buf);
6710 char *beg, *end, *str, *dst;
6712 for (beg = p + 1;;)
6714 end = strchr (beg, ',');
6715 if (end == NULL)
6716 end = strchr (beg, '\0');
6717 if ((size_t) (end - beg) < buflen)
6718 len += buflen + 1;
6719 else
6720 len += end - beg + 1;
6721 if (*end)
6722 beg = end + 1;
6723 else
6724 break;
6727 str = (char *) alloca (len);
6728 for (beg = p + 1, dst = str;;)
6730 const char *tem;
6731 bool mem_p, reg_p, inout_p;
6733 end = strchr (beg, ',');
6734 if (end)
6735 *end = '\0';
6736 beg[-1] = '=';
6737 tem = beg - 1;
6738 parse_output_constraint (&tem, i, 0, 0,
6739 &mem_p, &reg_p, &inout_p);
6740 if (dst != str)
6741 *dst++ = ',';
6742 if (reg_p)
6744 memcpy (dst, buf, buflen);
6745 dst += buflen;
6747 else
6749 if (end)
6750 len = end - beg;
6751 else
6752 len = strlen (beg);
6753 memcpy (dst, beg, len);
6754 dst += len;
6756 if (end)
6757 beg = end + 1;
6758 else
6759 break;
6761 *dst = '\0';
6762 input = build_string (dst - str, str);
6764 else
6765 input = build_string (strlen (buf), buf);
6767 else
6768 input = build_string (constraint_len - 1, constraint + 1);
6770 free (p);
6772 input = build_tree_list (build_tree_list (NULL_TREE, input),
6773 unshare_expr (TREE_VALUE (link)));
6774 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6778 link_next = NULL_TREE;
6779 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6781 link_next = TREE_CHAIN (link);
6782 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6783 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6784 oconstraints, &allows_mem, &allows_reg);
6786 /* If we can't make copies, we can only accept memory. */
6787 tree intype = TREE_TYPE (TREE_VALUE (link));
6788 if (intype != error_mark_node
6789 && (TREE_ADDRESSABLE (intype)
6790 || !COMPLETE_TYPE_P (intype)
6791 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6793 if (allows_mem)
6794 allows_reg = 0;
6795 else
6797 error ("impossible constraint in %<asm%>");
6798 error ("non-memory input %d must stay in memory", i);
6799 return GS_ERROR;
6803 /* If the operand is a memory input, it should be an lvalue. */
6804 if (!allows_reg && allows_mem)
6806 tree inputv = TREE_VALUE (link);
6807 STRIP_NOPS (inputv);
6808 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6809 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6810 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6811 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6812 || TREE_CODE (inputv) == MODIFY_EXPR)
6813 TREE_VALUE (link) = error_mark_node;
6814 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6815 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6816 if (tret != GS_ERROR)
6818 /* Unlike output operands, memory inputs are not guaranteed
6819 to be lvalues by the FE, and while the expressions are
6820 marked addressable there, if it is e.g. a statement
6821 expression, temporaries in it might not end up being
6822 addressable. They might be already used in the IL and thus
6823 it is too late to make them addressable now though. */
6824 tree x = TREE_VALUE (link);
6825 while (handled_component_p (x))
6826 x = TREE_OPERAND (x, 0);
6827 if (TREE_CODE (x) == MEM_REF
6828 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6829 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6830 if ((VAR_P (x)
6831 || TREE_CODE (x) == PARM_DECL
6832 || TREE_CODE (x) == RESULT_DECL)
6833 && !TREE_ADDRESSABLE (x)
6834 && is_gimple_reg (x))
6836 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6837 input_location), 0,
6838 "memory input %d is not directly addressable",
6839 i);
6840 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6843 mark_addressable (TREE_VALUE (link));
6844 if (tret == GS_ERROR)
6846 if (inputv != error_mark_node)
6847 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6848 "memory input %d is not directly addressable", i);
6849 ret = tret;
6852 else
6854 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6855 is_gimple_asm_val, fb_rvalue);
6856 if (tret == GS_ERROR)
6857 ret = tret;
6860 TREE_CHAIN (link) = NULL_TREE;
6861 vec_safe_push (inputs, link);
6864 link_next = NULL_TREE;
6865 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6867 link_next = TREE_CHAIN (link);
6868 TREE_CHAIN (link) = NULL_TREE;
6869 vec_safe_push (clobbers, link);
6872 link_next = NULL_TREE;
6873 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6875 link_next = TREE_CHAIN (link);
6876 TREE_CHAIN (link) = NULL_TREE;
6877 vec_safe_push (labels, link);
6880 /* Do not add ASMs with errors to the gimple IL stream. */
6881 if (ret != GS_ERROR)
6883 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6884 inputs, outputs, clobbers, labels);
6886 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6887 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6888 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6890 gimplify_seq_add_stmt (pre_p, stmt);
6893 return ret;
6896 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6897 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6898 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6899 return to this function.
6901 FIXME should we complexify the prequeue handling instead? Or use flags
6902 for all the cleanups and let the optimizer tighten them up? The current
6903 code seems pretty fragile; it will break on a cleanup within any
6904 non-conditional nesting. But any such nesting would be broken, anyway;
6905 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6906 and continues out of it. We can do that at the RTL level, though, so
6907 having an optimizer to tighten up try/finally regions would be a Good
6908 Thing. */
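/* A sketch of the conversion done below: a sequence such as
     WCE <CLEANUP>; more-stmts;
   becomes, approximately,
     try { more-stmts; } finally { CLEANUP; }  */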
6910 static enum gimplify_status
6911 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6913 gimple_stmt_iterator iter;
6914 gimple_seq body_sequence = NULL;
6916 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6918 /* We only care about the number of conditions between the innermost
6919 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6920 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6921 int old_conds = gimplify_ctxp->conditions;
6922 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6923 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6924 gimplify_ctxp->conditions = 0;
6925 gimplify_ctxp->conditional_cleanups = NULL;
6926 gimplify_ctxp->in_cleanup_point_expr = true;
6928 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6930 gimplify_ctxp->conditions = old_conds;
6931 gimplify_ctxp->conditional_cleanups = old_cleanups;
6932 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6934 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6936 gimple *wce = gsi_stmt (iter);
6938 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6940 if (gsi_one_before_end_p (iter))
6942 /* Note that gsi_insert_seq_before and gsi_remove do not
6943 scan operands, unlike some other sequence mutators. */
6944 if (!gimple_wce_cleanup_eh_only (wce))
6945 gsi_insert_seq_before_without_update (&iter,
6946 gimple_wce_cleanup (wce),
6947 GSI_SAME_STMT);
6948 gsi_remove (&iter, true);
6949 break;
6951 else
6953 gtry *gtry;
6954 gimple_seq seq;
6955 enum gimple_try_flags kind;
6957 if (gimple_wce_cleanup_eh_only (wce))
6958 kind = GIMPLE_TRY_CATCH;
6959 else
6960 kind = GIMPLE_TRY_FINALLY;
6961 seq = gsi_split_seq_after (iter);
6963 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6964 /* Do not use gsi_replace here, as it may scan operands.
6965 We want to do a simple structural modification only. */
6966 gsi_set_stmt (&iter, gtry);
6967 iter = gsi_start (gtry->eval);
6970 else
6971 gsi_next (&iter);
6974 gimplify_seq_add_seq (pre_p, body_sequence);
6975 if (temp)
6977 *expr_p = temp;
6978 return GS_OK;
6980 else
6982 *expr_p = NULL;
6983 return GS_ALL_DONE;
6987 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6988 is the cleanup action required. EH_ONLY is true if the cleanup should
6989 only be executed if an exception is thrown, not on normal exit.
6990 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6991 only valid for clobbers. */
6993 static void
6994 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6995 bool force_uncond = false)
6997 gimple *wce;
6998 gimple_seq cleanup_stmts = NULL;
7000 /* Errors can result in improperly nested cleanups, which results in
7001 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7002 if (seen_error ())
7003 return;
7005 if (gimple_conditional_context ())
7007 /* If we're in a conditional context, this is more complex. We only
7008 want to run the cleanup if we actually ran the initialization that
7009 necessitates it, but we want to run it after the end of the
7010 conditional context. So we wrap the try/finally around the
7011 condition and use a flag to determine whether or not to actually
7012 run the destructor. Thus
7014 test ? f(A()) : 0
7016 becomes (approximately)
7018 flag = 0;
7019 try {
7020 if (test) { A::A(temp); flag = 1; val = f(temp); }
7021 else { val = 0; }
7022 } finally {
7023 if (flag) A::~A(temp);
7024 }
7025 */
7027 if (force_uncond)
7029 gimplify_stmt (&cleanup, &cleanup_stmts);
7030 wce = gimple_build_wce (cleanup_stmts);
7031 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7033 else
7035 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7036 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7037 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7039 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7040 gimplify_stmt (&cleanup, &cleanup_stmts);
7041 wce = gimple_build_wce (cleanup_stmts);
7042 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7044 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7045 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7046 gimplify_seq_add_stmt (pre_p, ftrue);
7048 /* Because of this manipulation, and the EH edges that jump
7049 threading cannot redirect, the temporary (VAR) will appear
7050 to be used uninitialized. Don't warn. */
7051 suppress_warning (var, OPT_Wuninitialized);
7054 else
7056 gimplify_stmt (&cleanup, &cleanup_stmts);
7057 wce = gimple_build_wce (cleanup_stmts);
7058 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7059 gimplify_seq_add_stmt (pre_p, wce);
7063 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7065 static enum gimplify_status
7066 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7068 tree targ = *expr_p;
7069 tree temp = TARGET_EXPR_SLOT (targ);
7070 tree init = TARGET_EXPR_INITIAL (targ);
7071 enum gimplify_status ret;
7073 bool unpoison_empty_seq = false;
7074 gimple_stmt_iterator unpoison_it;
7076 if (init)
7078 gimple_seq init_pre_p = NULL;
7080 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
7081 temp to the temps list. Also handle variable-length TARGET_EXPRs. */
7082 if (!poly_int_tree_p (DECL_SIZE (temp)))
7084 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7085 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7086 /* FIXME: this is correct only when the size of the type does
7087 not depend on expressions evaluated in init. */
7088 gimplify_vla_decl (temp, &init_pre_p);
7090 else
7092 /* Save the location where we need to place unpoisoning. It's possible
7093 that a variable will be converted to needs_to_live_in_memory. */
7094 unpoison_it = gsi_last (*pre_p);
7095 unpoison_empty_seq = gsi_end_p (unpoison_it);
7097 gimple_add_tmp_var (temp);
7100 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7101 expression is supposed to initialize the slot. */
7102 if (VOID_TYPE_P (TREE_TYPE (init)))
7103 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7104 fb_none);
7105 else
7107 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7108 init = init_expr;
7109 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7110 fb_none);
7111 init = NULL;
7112 ggc_free (init_expr);
7114 if (ret == GS_ERROR)
7116 /* PR c++/28266 Make sure this is expanded only once. */
7117 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7118 return GS_ERROR;
7121 if (init)
7122 gimplify_and_add (init, &init_pre_p);
7124 /* Add a clobber for the temporary going out of scope, like
7125 gimplify_bind_expr. */
7126 if (gimplify_ctxp->in_cleanup_point_expr
7127 && needs_to_live_in_memory (temp))
7129 if (flag_stack_reuse == SR_ALL)
7131 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7132 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7133 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7135 if (asan_poisoned_variables
7136 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7137 && !TREE_STATIC (temp)
7138 && dbg_cnt (asan_use_after_scope)
7139 && !gimplify_omp_ctxp)
7141 tree asan_cleanup = build_asan_poison_call_expr (temp);
7142 if (asan_cleanup)
7144 if (unpoison_empty_seq)
7145 unpoison_it = gsi_start (*pre_p);
7147 asan_poison_variable (temp, false, &unpoison_it,
7148 unpoison_empty_seq);
7149 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7154 gimple_seq_add_seq (pre_p, init_pre_p);
7156 /* If needed, push the cleanup for the temp. */
7157 if (TARGET_EXPR_CLEANUP (targ))
7158 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7159 CLEANUP_EH_ONLY (targ), pre_p);
7161 /* Only expand this once. */
7162 TREE_OPERAND (targ, 3) = init;
7163 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7165 else
7166 /* We should have expanded this before. */
7167 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7169 *expr_p = temp;
7170 return GS_OK;
7173 /* Gimplification of expression trees. */
7175 /* Gimplify an expression which appears at statement context. The
7176 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7177 NULL, a new sequence is allocated.
7179 Return true if we actually added a statement to the queue. */
7181 bool
7182 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7184 gimple_seq_node last;
7186 last = gimple_seq_last (*seq_p);
7187 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7188 return last != gimple_seq_last (*seq_p);
7191 /* Add FIRSTPRIVATE entries for DECL to CTX and to the surrounding
7192 OpenMP parallels. If entries already exist, force them to be some
7193 flavor of private. If there is no enclosing parallel, do nothing. */
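/* E.g. the gimplified size of a VLA referenced inside
     #pragma omp parallel
   is added as firstprivate so each thread receives the value that
   was computed on entry (a sketch of the intent). */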
7195 void
7196 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7198 splay_tree_node n;
7200 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7201 return;
7203 do
7204 {
7205 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7206 if (n != NULL)
7208 if (n->value & GOVD_SHARED)
7209 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7210 else if (n->value & GOVD_MAP)
7211 n->value |= GOVD_MAP_TO_ONLY;
7212 else
7213 return;
7215 else if ((ctx->region_type & ORT_TARGET) != 0)
7217 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7218 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7219 else
7220 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7222 else if (ctx->region_type != ORT_WORKSHARE
7223 && ctx->region_type != ORT_TASKGROUP
7224 && ctx->region_type != ORT_SIMD
7225 && ctx->region_type != ORT_ACC
7226 && !(ctx->region_type & ORT_TARGET_DATA))
7227 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7229 ctx = ctx->outer_context;
7231 while (ctx);
7234 /* Similarly for each of the type sizes of TYPE. */
7236 static void
7237 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7239 if (type == NULL || type == error_mark_node)
7240 return;
7241 type = TYPE_MAIN_VARIANT (type);
7243 if (ctx->privatized_types->add (type))
7244 return;
7246 switch (TREE_CODE (type))
7248 case INTEGER_TYPE:
7249 case ENUMERAL_TYPE:
7250 case BOOLEAN_TYPE:
7251 case REAL_TYPE:
7252 case FIXED_POINT_TYPE:
7253 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7254 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7255 break;
7257 case ARRAY_TYPE:
7258 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7259 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7260 break;
7262 case RECORD_TYPE:
7263 case UNION_TYPE:
7264 case QUAL_UNION_TYPE:
7266 tree field;
7267 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7268 if (TREE_CODE (field) == FIELD_DECL)
7270 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7271 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7274 break;
7276 case POINTER_TYPE:
7277 case REFERENCE_TYPE:
7278 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7279 break;
7281 default:
7282 break;
7285 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7286 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7287 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7290 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7292 static void
7293 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7295 splay_tree_node n;
7296 unsigned int nflags;
7297 tree t;
7299 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7300 return;
7302 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7303 there are constructors involved somewhere. The exception is a shared
7304 clause: there is nothing privatized in that case. */
7305 if ((flags & GOVD_SHARED) == 0
7306 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7307 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7308 flags |= GOVD_SEEN;
7310 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7311 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7313 /* We shouldn't be re-adding the decl with the same data
7314 sharing class. */
7315 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7316 nflags = n->value | flags;
7317 /* The only combination of data sharing classes we should see is
7318 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7319 reduction variables to be used in data sharing clauses. */
7320 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7321 || ((nflags & GOVD_DATA_SHARE_CLASS)
7322 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7323 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7324 n->value = nflags;
7325 return;
7328 /* When adding a variable-sized variable, we have to handle all sorts
7329 of additional bits of data: the pointer replacement variable, and
7330 the parameters of the type. */
7331 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7333 /* Add the pointer replacement variable as PRIVATE if the variable
7334 replacement is private, else FIRSTPRIVATE since we'll need the
7335 address of the original variable either for SHARED, or for the
7336 copy into or out of the context. */
7337 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7339 if (flags & GOVD_MAP)
7340 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7341 else if (flags & GOVD_PRIVATE)
7342 nflags = GOVD_PRIVATE;
7343 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7344 && (flags & GOVD_FIRSTPRIVATE))
7345 || (ctx->region_type == ORT_TARGET_DATA
7346 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7347 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7348 else
7349 nflags = GOVD_FIRSTPRIVATE;
7350 nflags |= flags & GOVD_SEEN;
7351 t = DECL_VALUE_EXPR (decl);
7352 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7353 t = TREE_OPERAND (t, 0);
7354 gcc_assert (DECL_P (t));
7355 omp_add_variable (ctx, t, nflags);
7358 /* Add all of the variable and type parameters (which should have
7359 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7360 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7361 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7362 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7364 /* The variable-sized variable itself is never SHARED, only some form
7365 of PRIVATE. The sharing would take place via the pointer variable
7366 which we remapped above. */
7367 if (flags & GOVD_SHARED)
7368 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7369 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7371 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7372 alloca statement we generate for the variable, so make sure it
7373 is available. This isn't automatically needed for the SHARED
7374 case, since we won't be allocating local storage then.
7375 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7376 in this case omp_notice_variable will be called later
7377 on when it is gimplified. */
7378 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7379 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7380 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7382 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7383 && omp_privatize_by_reference (decl))
7385 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7387 /* Similar to the direct variable sized case above, we'll need the
7388 size of references being privatized. */
7389 if ((flags & GOVD_SHARED) == 0)
7391 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7392 if (t && DECL_P (t))
7393 omp_notice_variable (ctx, t, true);
7397 if (n != NULL)
7398 n->value |= flags;
7399 else
7400 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7402 /* For reduction clauses in OpenACC loop directives, by default create a
7403 copy clause on the enclosing parallel construct for carrying back the
7404 results. */
7405 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7407 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7408 while (outer_ctx)
7410 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7411 if (n != NULL)
7413 /* Ignore local variables and explicitly declared clauses. */
7414 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7415 break;
7416 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7418 /* According to the OpenACC spec, such a reduction variable
7419 should already have a copy map on a kernels construct,
7420 verify that here. */
7421 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7422 && (n->value & GOVD_MAP));
7424 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7426 /* Remove firstprivate and make it a copy map. */
7427 n->value &= ~GOVD_FIRSTPRIVATE;
7428 n->value |= GOVD_MAP;
7431 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7433 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7434 GOVD_MAP | GOVD_SEEN);
7435 break;
7437 outer_ctx = outer_ctx->outer_context;
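/* E.g. for a hypothetical
     #pragma acc parallel loop reduction(+:sum)
   the loop above gives SUM an implicit map (GOVD_MAP) on the enclosing
   parallel context so the reduction result is copied back out.  */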
7442 /* Notice a threadprivate variable DECL used in OMP context CTX.
7443 This prints out diagnostics about threadprivate variable uses in
7444 target regions, regions with an order(concurrent) clause and in
7445 untied tasks. If DECL2 is non-NULL, prevent this warning on that variable. */
7447 static bool
7448 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7449 tree decl2)
7451 splay_tree_node n;
7452 struct gimplify_omp_ctx *octx;
7454 for (octx = ctx; octx; octx = octx->outer_context)
7455 if ((octx->region_type & ORT_TARGET) != 0
7456 || octx->order_concurrent)
7458 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7459 if (n == NULL)
7461 if (octx->order_concurrent)
7463 error ("threadprivate variable %qE used in a region with"
7464 " %<order(concurrent)%> clause", DECL_NAME (decl));
7465 inform (octx->location, "enclosing region");
7467 else
7469 error ("threadprivate variable %qE used in target region",
7470 DECL_NAME (decl));
7471 inform (octx->location, "enclosing target region");
7473 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7475 if (decl2)
7476 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7479 if (ctx->region_type != ORT_UNTIED_TASK)
7480 return false;
7481 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7482 if (n == NULL)
7484 error ("threadprivate variable %qE used in untied task",
7485 DECL_NAME (decl));
7486 inform (ctx->location, "enclosing task");
7487 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7489 if (decl2)
7490 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7491 return false;
7494 /* Return true if global var DECL is device resident. */
7496 static bool
7497 device_resident_p (tree decl)
7499 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7501 if (!attr)
7502 return false;
7504 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7506 tree c = TREE_VALUE (t);
7507 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7508 return true;
7511 return false;
7514 /* Return true if DECL has an ACC DECLARE attribute. */
7516 static bool
7517 is_oacc_declared (tree decl)
7519 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7520 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7521 return declared != NULL_TREE;
7524 /* Determine outer default flags for DECL mentioned in an OMP region
7525 but not declared in an enclosing clause.
7527 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7528 remapped firstprivate instead of shared. To some extent this is
7529 addressed in omp_firstprivatize_type_sizes, but not
7530 effectively. */
7532 static unsigned
7533 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7534 bool in_code, unsigned flags)
7536 enum omp_clause_default_kind default_kind = ctx->default_kind;
7537 enum omp_clause_default_kind kind;
7539 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7540 if (ctx->region_type & ORT_TASK)
7542 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7544 /* The event-handle specified by a detach clause should always be firstprivate,
7545 regardless of the current default. */
7546 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7547 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7549 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7550 default_kind = kind;
7551 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7552 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7553 /* For C/C++ default({,first}private), variables with static storage duration
7554 declared in a namespace or global scope and referenced in the construct
7555 must be explicitly specified, i.e. the default acts as default(none). */
7556 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7557 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7558 && VAR_P (decl)
7559 && is_global_var (decl)
7560 && (DECL_FILE_SCOPE_P (decl)
7561 || (DECL_CONTEXT (decl)
7562 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7563 && !lang_GNU_Fortran ())
7564 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7566 switch (default_kind)
7568 case OMP_CLAUSE_DEFAULT_NONE:
7570 const char *rtype;
7572 if (ctx->region_type & ORT_PARALLEL)
7573 rtype = "parallel";
7574 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7575 rtype = "taskloop";
7576 else if (ctx->region_type & ORT_TASK)
7577 rtype = "task";
7578 else if (ctx->region_type & ORT_TEAMS)
7579 rtype = "teams";
7580 else
7581 gcc_unreachable ();
7583 error ("%qE not specified in enclosing %qs",
7584 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7585 inform (ctx->location, "enclosing %qs", rtype);
7587 /* FALLTHRU */
7588 case OMP_CLAUSE_DEFAULT_SHARED:
7589 flags |= GOVD_SHARED;
7590 break;
7591 case OMP_CLAUSE_DEFAULT_PRIVATE:
7592 flags |= GOVD_PRIVATE;
7593 break;
7594 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7595 flags |= GOVD_FIRSTPRIVATE;
7596 break;
7597 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7598 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7599 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7600 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7602 omp_notice_variable (octx, decl, in_code);
7603 for (; octx; octx = octx->outer_context)
7605 splay_tree_node n2;
7607 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7608 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7609 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7610 continue;
7611 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7613 flags |= GOVD_FIRSTPRIVATE;
7614 goto found_outer;
7616 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7618 flags |= GOVD_SHARED;
7619 goto found_outer;
7624 if (TREE_CODE (decl) == PARM_DECL
7625 || (!is_global_var (decl)
7626 && DECL_CONTEXT (decl) == current_function_decl))
7627 flags |= GOVD_FIRSTPRIVATE;
7628 else
7629 flags |= GOVD_SHARED;
7630 found_outer:
7631 break;
7633 default:
7634 gcc_unreachable ();
7637 return flags;
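/* E.g. a hypothetical
     int x = 0;
     #pragma omp parallel default(none)
     x++;
   is diagnosed by the OMP_CLAUSE_DEFAULT_NONE case above with
   "'x' not specified in enclosing 'parallel'".  */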
7641 /* Determine outer default flags for DECL mentioned in an OACC region
7642 but not declared in an enclosing clause. */
7644 static unsigned
7645 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7647 const char *rkind;
7648 bool on_device = false;
7649 bool is_private = false;
7650 bool declared = is_oacc_declared (decl);
7651 tree type = TREE_TYPE (decl);
7653 if (omp_privatize_by_reference (decl))
7654 type = TREE_TYPE (type);
7656 /* For Fortran COMMON blocks, only used variables in those blocks are
7657 transferred and remapped. The block itself will have a private clause to
7658 avoid transferring the data twice.
7659 The hook evaluates to false by default. For a variable in Fortran's COMMON
7660 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7661 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7662 the whole block. For C++ and Fortran, it can also be true under certain
7663 other conditions, if DECL_HAS_VALUE_EXPR. */
7664 if (RECORD_OR_UNION_TYPE_P (type))
7665 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7667 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7668 && is_global_var (decl)
7669 && device_resident_p (decl)
7670 && !is_private)
7672 on_device = true;
7673 flags |= GOVD_MAP_TO_ONLY;
7676 switch (ctx->region_type)
7678 case ORT_ACC_KERNELS:
7679 rkind = "kernels";
7681 if (is_private)
7682 flags |= GOVD_FIRSTPRIVATE;
7683 else if (AGGREGATE_TYPE_P (type))
7685 /* Aggregates default to 'present_or_copy', or 'present'. */
7686 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7687 flags |= GOVD_MAP;
7688 else
7689 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7691 else
7692 /* Scalars default to 'copy'. */
7693 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7695 break;
7697 case ORT_ACC_PARALLEL:
7698 case ORT_ACC_SERIAL:
7699 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7701 if (is_private)
7702 flags |= GOVD_FIRSTPRIVATE;
7703 else if (on_device || declared)
7704 flags |= GOVD_MAP;
7705 else if (AGGREGATE_TYPE_P (type))
7707 /* Aggregates default to 'present_or_copy', or 'present'. */
7708 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7709 flags |= GOVD_MAP;
7710 else
7711 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7713 else
7714 /* Scalars default to 'firstprivate'. */
7715 flags |= GOVD_FIRSTPRIVATE;
7717 break;
7719 default:
7720 gcc_unreachable ();
7723 if (DECL_ARTIFICIAL (decl))
7724 ; /* We can get compiler-generated decls, and should not complain
7725 about them. */
7726 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7728 error ("%qE not specified in enclosing OpenACC %qs construct",
7729 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7730 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7732 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7733 ; /* Handled above. */
7734 else
7735 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7737 return flags;
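/* Summarizing the defaults computed above: on kernels regions scalars
   get an implicit 'copy' and aggregates 'present_or_copy' (or 'present'
   under default(present)); on parallel and serial regions aggregates
   behave the same, but scalars default to 'firstprivate'.  */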
7740 /* Record the fact that DECL was used within the OMP context CTX.
7741 IN_CODE is true when real code uses DECL, and false when we should
7742 merely emit default(none) errors. Return true if DECL is going to
7743 be remapped and thus DECL shouldn't be gimplified into its
7744 DECL_VALUE_EXPR (if any). */
7746 static bool
7747 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7749 splay_tree_node n;
7750 unsigned flags = in_code ? GOVD_SEEN : 0;
7751 bool ret = false, shared;
7753 if (error_operand_p (decl))
7754 return false;
7756 if (ctx->region_type == ORT_NONE)
7757 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7759 if (is_global_var (decl))
7761 /* Threadprivate variables are predetermined. */
7762 if (DECL_THREAD_LOCAL_P (decl))
7763 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7765 if (DECL_HAS_VALUE_EXPR_P (decl))
7767 if (ctx->region_type & ORT_ACC)
7768 /* For OpenACC, defer expansion of value to avoid transferring
7769 privatized common block data instead of the implicitly or explicitly
7770 transferred variables which are in such common blocks. */
7773 ;
7772 else
7774 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7776 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7777 return omp_notice_threadprivate_variable (ctx, decl, value);
7781 if (gimplify_omp_ctxp->outer_context == NULL
7782 && VAR_P (decl)
7783 && oacc_get_fn_attrib (current_function_decl))
7785 location_t loc = DECL_SOURCE_LOCATION (decl);
7787 if (lookup_attribute ("omp declare target link",
7788 DECL_ATTRIBUTES (decl)))
7790 error_at (loc,
7791 "%qE with %<link%> clause used in %<routine%> function",
7792 DECL_NAME (decl));
7793 return false;
7795 else if (!lookup_attribute ("omp declare target",
7796 DECL_ATTRIBUTES (decl)))
7798 error_at (loc,
7799 "%qE requires a %<declare%> directive for use "
7800 "in a %<routine%> function", DECL_NAME (decl));
7801 return false;
7806 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7807 if ((ctx->region_type & ORT_TARGET) != 0)
7809 if (ctx->region_type & ORT_ACC)
7810 /* For OpenACC, as remarked above, defer expansion. */
7811 shared = false;
7812 else
7813 shared = true;
7815 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7816 if (n == NULL)
7818 unsigned nflags = flags;
7819 if ((ctx->region_type & ORT_ACC) == 0)
7821 bool is_declare_target = false;
7822 if (is_global_var (decl)
7823 && varpool_node::get_create (decl)->offloadable)
7825 struct gimplify_omp_ctx *octx;
7826 for (octx = ctx->outer_context;
7827 octx; octx = octx->outer_context)
7829 n = splay_tree_lookup (octx->variables,
7830 (splay_tree_key)decl);
7831 if (n
7832 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7833 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7834 break;
7836 is_declare_target = octx == NULL;
7838 if (!is_declare_target)
7840 int gdmk;
7841 enum omp_clause_defaultmap_kind kind;
7842 if (lang_hooks.decls.omp_allocatable_p (decl))
7843 gdmk = GDMK_ALLOCATABLE;
7844 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7845 gdmk = GDMK_SCALAR_TARGET;
7846 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7847 gdmk = GDMK_SCALAR;
7848 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7849 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7850 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7851 == POINTER_TYPE)))
7852 gdmk = GDMK_POINTER;
7853 else
7854 gdmk = GDMK_AGGREGATE;
7855 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7856 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7858 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7859 nflags |= GOVD_FIRSTPRIVATE;
7860 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7861 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7862 else
7863 gcc_unreachable ();
7865 else if (ctx->defaultmap[gdmk] == 0)
7867 tree d = lang_hooks.decls.omp_report_decl (decl);
7868 error ("%qE not specified in enclosing %<target%>",
7869 DECL_NAME (d));
7870 inform (ctx->location, "enclosing %<target%>");
7872 else if (ctx->defaultmap[gdmk]
7873 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7874 nflags |= ctx->defaultmap[gdmk];
7875 else
7877 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7878 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7883 struct gimplify_omp_ctx *octx = ctx->outer_context;
7884 if ((ctx->region_type & ORT_ACC) && octx)
7886 /* Look in outer OpenACC contexts, to see if there's a
7887 data attribute for this variable. */
7888 omp_notice_variable (octx, decl, in_code);
7890 for (; octx; octx = octx->outer_context)
7892 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7893 break;
7894 splay_tree_node n2
7895 = splay_tree_lookup (octx->variables,
7896 (splay_tree_key) decl);
7897 if (n2)
7899 if (octx->region_type == ORT_ACC_HOST_DATA)
7900 error ("variable %qE declared in enclosing "
7901 "%<host_data%> region", DECL_NAME (decl));
7902 nflags |= GOVD_MAP;
7903 if (octx->region_type == ORT_ACC_DATA
7904 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7905 nflags |= GOVD_MAP_0LEN_ARRAY;
7906 goto found_outer;
7911 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7912 | GOVD_MAP_ALLOC_ONLY)) == flags)
7914 tree type = TREE_TYPE (decl);
7916 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7917 && omp_privatize_by_reference (decl))
7918 type = TREE_TYPE (type);
7919 if (!omp_mappable_type (type))
7921 error ("%qD referenced in target region does not have "
7922 "a mappable type", decl);
7923 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7925 else
7927 if ((ctx->region_type & ORT_ACC) != 0)
7928 nflags = oacc_default_clause (ctx, decl, flags);
7929 else
7930 nflags |= GOVD_MAP;
7933 found_outer:
7934 omp_add_variable (ctx, decl, nflags);
7936 else
7938 /* If nothing changed, there's nothing left to do. */
7939 if ((n->value & flags) == flags)
7940 return ret;
7941 flags |= n->value;
7942 n->value = flags;
7944 goto do_outer;
7947 if (n == NULL)
7949 if (ctx->region_type == ORT_WORKSHARE
7950 || ctx->region_type == ORT_TASKGROUP
7951 || ctx->region_type == ORT_SIMD
7952 || ctx->region_type == ORT_ACC
7953 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7954 goto do_outer;
7956 flags = omp_default_clause (ctx, decl, in_code, flags);
7958 if ((flags & GOVD_PRIVATE)
7959 && lang_hooks.decls.omp_private_outer_ref (decl))
7960 flags |= GOVD_PRIVATE_OUTER_REF;
7962 omp_add_variable (ctx, decl, flags);
7964 shared = (flags & GOVD_SHARED) != 0;
7965 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7966 goto do_outer;
7969 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7970 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
7971 if (ctx->region_type == ORT_SIMD
7972 && ctx->in_for_exprs
7973 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
7974 == GOVD_PRIVATE))
7975 flags &= ~GOVD_SEEN;
7977 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7978 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7979 && DECL_SIZE (decl))
7981 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7983 splay_tree_node n2;
7984 tree t = DECL_VALUE_EXPR (decl);
7985 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7986 t = TREE_OPERAND (t, 0);
7987 gcc_assert (DECL_P (t));
7988 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7989 n2->value |= GOVD_SEEN;
7991 else if (omp_privatize_by_reference (decl)
7992 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7993 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7994 != INTEGER_CST))
7996 splay_tree_node n2;
7997 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7998 gcc_assert (DECL_P (t));
7999 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8000 if (n2)
8001 omp_notice_variable (ctx, t, true);
8005 if (ctx->region_type & ORT_ACC)
8006 /* For OpenACC, as remarked above, defer expansion. */
8007 shared = false;
8008 else
8009 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8010 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8012 /* If nothing changed, there's nothing left to do. */
8013 if ((n->value & flags) == flags)
8014 return ret;
8015 flags |= n->value;
8016 n->value = flags;
8018 do_outer:
8019 /* If the variable is private in the current context, then we don't
8020 need to propagate anything to an outer context. */
8021 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8022 return ret;
8023 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8024 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8025 return ret;
8026 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8027 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8028 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8029 return ret;
8030 if (ctx->outer_context
8031 && omp_notice_variable (ctx->outer_context, decl, in_code))
8032 return true;
8033 return ret;
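/* E.g. for a hypothetical
     double d[1024];
     #pragma omp target
     use (d);
   D has no explicit clause, so the defaultmap handling above gives it
   an implicit (tofrom) map on the target construct.  */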
8036 /* Verify that DECL is private within CTX. If there's specific information
8037 to the contrary in the innermost scope, generate an error. */
8039 static bool
8040 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8042 splay_tree_node n;
8044 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8045 if (n != NULL)
8047 if (n->value & GOVD_SHARED)
8049 if (ctx == gimplify_omp_ctxp)
8051 if (simd)
8052 error ("iteration variable %qE is predetermined linear",
8053 DECL_NAME (decl));
8054 else
8055 error ("iteration variable %qE should be private",
8056 DECL_NAME (decl));
8057 n->value = GOVD_PRIVATE;
8058 return true;
8060 else
8061 return false;
8063 else if ((n->value & GOVD_EXPLICIT) != 0
8064 && (ctx == gimplify_omp_ctxp
8065 || (ctx->region_type == ORT_COMBINED_PARALLEL
8066 && gimplify_omp_ctxp->outer_context == ctx)))
8068 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8069 error ("iteration variable %qE should not be firstprivate",
8070 DECL_NAME (decl));
8071 else if ((n->value & GOVD_REDUCTION) != 0)
8072 error ("iteration variable %qE should not be reduction",
8073 DECL_NAME (decl));
8074 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8075 error ("iteration variable %qE should not be linear",
8076 DECL_NAME (decl));
8078 return (ctx == gimplify_omp_ctxp
8079 || (ctx->region_type == ORT_COMBINED_PARALLEL
8080 && gimplify_omp_ctxp->outer_context == ctx));
8083 if (ctx->region_type != ORT_WORKSHARE
8084 && ctx->region_type != ORT_TASKGROUP
8085 && ctx->region_type != ORT_SIMD
8086 && ctx->region_type != ORT_ACC)
8087 return false;
8088 else if (ctx->outer_context)
8089 return omp_is_private (ctx->outer_context, decl, simd);
8090 return false;
8093 /* Return true if DECL is private within a parallel region
8094 that binds to the current construct's context or in parallel
8095 region's REDUCTION clause. */
8097 static bool
8098 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8100 splay_tree_node n;
8104 ctx = ctx->outer_context;
8105 if (ctx == NULL)
8107 if (is_global_var (decl))
8108 return false;
8110 /* References might be private, but might be shared too.
8111 When checking for copyprivate, assume they might be private;
8112 otherwise assume they might be shared. */
8113 if (copyprivate)
8114 return true;
8116 if (omp_privatize_by_reference (decl))
8117 return false;
8119 /* Treat C++ privatized non-static data members outside
8120 of the privatization the same. */
8121 if (omp_member_access_dummy_var (decl))
8122 return false;
8124 return true;
8127 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8129 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8130 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8132 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8133 || n == NULL
8134 || (n->value & GOVD_MAP) == 0)
8135 continue;
8136 return false;
8139 if (n != NULL)
8141 if ((n->value & GOVD_LOCAL) != 0
8142 && omp_member_access_dummy_var (decl))
8143 return false;
8144 return (n->value & GOVD_SHARED) == 0;
8147 if (ctx->region_type == ORT_WORKSHARE
8148 || ctx->region_type == ORT_TASKGROUP
8149 || ctx->region_type == ORT_SIMD
8150 || ctx->region_type == ORT_ACC)
8151 continue;
8153 break;
8155 while (1);
8156 return false;
8159 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8161 static tree
8162 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8164 tree t = *tp;
8166 /* If this is the DECL_EXPR for the decl we are looking for, return it. */
8167 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8168 return t;
8170 if (IS_TYPE_OR_DECL_P (t))
8171 *walk_subtrees = 0;
8172 return NULL_TREE;
8176 /* Gimplify the affinity clause but effectively ignore it.
8177 Generate:
8178 var = begin;
8179 if ((step > 0) ? var <= end : var > end)
8180 locator_var_expr; */
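/* A hypothetical clause this handles:
     #pragma omp task affinity(iterator(i=0:n): a[i])
   Affinity is only a hint, so the iterator bounds and locator
   expressions are gimplified for their side effects and then
   discarded.  */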
8182 static void
8183 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8185 tree last_iter = NULL_TREE;
8186 tree last_bind = NULL_TREE;
8187 tree label = NULL_TREE;
8188 tree *last_body = NULL;
8189 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8190 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8192 tree t = OMP_CLAUSE_DECL (c);
8193 if (TREE_CODE (t) == TREE_LIST
8194 && TREE_PURPOSE (t)
8195 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8197 if (TREE_VALUE (t) == null_pointer_node)
8198 continue;
8199 if (TREE_PURPOSE (t) != last_iter)
8201 if (last_bind)
8203 append_to_statement_list (label, last_body);
8204 gimplify_and_add (last_bind, pre_p);
8205 last_bind = NULL_TREE;
8207 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8209 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8210 is_gimple_val, fb_rvalue) == GS_ERROR
8211 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8212 is_gimple_val, fb_rvalue) == GS_ERROR
8213 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8214 is_gimple_val, fb_rvalue) == GS_ERROR
8215 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8216 is_gimple_val, fb_rvalue)
8217 == GS_ERROR))
8218 return;
8220 last_iter = TREE_PURPOSE (t);
8221 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8222 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8223 NULL, block);
8224 last_body = &BIND_EXPR_BODY (last_bind);
8225 tree cond = NULL_TREE;
8226 location_t loc = OMP_CLAUSE_LOCATION (c);
8227 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8229 tree var = TREE_VEC_ELT (it, 0);
8230 tree begin = TREE_VEC_ELT (it, 1);
8231 tree end = TREE_VEC_ELT (it, 2);
8232 tree step = TREE_VEC_ELT (it, 3);
8233 loc = DECL_SOURCE_LOCATION (var);
8234 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8235 var, begin);
8236 append_to_statement_list_force (tem, last_body);
8238 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8239 step, build_zero_cst (TREE_TYPE (step)));
8240 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8241 var, end);
8242 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8243 var, end);
8244 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8245 cond1, cond2, cond3);
8246 if (cond)
8247 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8248 boolean_type_node, cond, cond1);
8249 else
8250 cond = cond1;
8252 tree cont_label = create_artificial_label (loc);
8253 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8254 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8255 void_node,
8256 build_and_jump (&cont_label));
8257 append_to_statement_list_force (tem, last_body);
8259 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8261 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8262 last_body);
8263 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8265 if (error_operand_p (TREE_VALUE (t)))
8266 return;
8267 append_to_statement_list_force (TREE_VALUE (t), last_body);
8268 TREE_VALUE (t) = null_pointer_node;
8270 else
8272 if (last_bind)
8274 append_to_statement_list (label, last_body);
8275 gimplify_and_add (last_bind, pre_p);
8276 last_bind = NULL_TREE;
8278 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8280 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8281 NULL, is_gimple_val, fb_rvalue);
8282 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8284 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8285 return;
8286 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8287 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8288 return;
8289 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8292 if (last_bind)
8294 append_to_statement_list (label, last_body);
8295 gimplify_and_add (last_bind, pre_p);
8297 return;
8300 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8301 lower all the depend clauses by populating corresponding depend
8302 array. Returns 0 if there are no such depend clauses, or
8303 2 if all depend clauses should be removed, 1 otherwise. */
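/* The depend kinds map to slots as follows (see the switch below):
   out/inout -> 0, mutexinoutset -> 1, in -> 2, depobj -> 3,
   inoutset -> 4.  In the new format the array is laid out as
     array[0] = 0, array[1] = total count,
     array[2..4] = counts for out/inout, mutexinoutset and in,
   followed by the depend addresses; the legacy format stores just
     array[0] = total count, array[1] = out/inout count
   ahead of the addresses.  */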
8305 static int
8306 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8308 tree c;
8309 gimple *g;
8310 size_t n[5] = { 0, 0, 0, 0, 0 };
8311 bool unused[5];
8312 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8313 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8314 size_t i, j;
8315 location_t first_loc = UNKNOWN_LOCATION;
8317 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8318 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8320 switch (OMP_CLAUSE_DEPEND_KIND (c))
8322 case OMP_CLAUSE_DEPEND_IN:
8323 i = 2;
8324 break;
8325 case OMP_CLAUSE_DEPEND_OUT:
8326 case OMP_CLAUSE_DEPEND_INOUT:
8327 i = 0;
8328 break;
8329 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8330 i = 1;
8331 break;
8332 case OMP_CLAUSE_DEPEND_DEPOBJ:
8333 i = 3;
8334 break;
8335 case OMP_CLAUSE_DEPEND_INOUTSET:
8336 i = 4;
8337 break;
8338 default:
8339 gcc_unreachable ();
8341 tree t = OMP_CLAUSE_DECL (c);
8342 if (first_loc == UNKNOWN_LOCATION)
8343 first_loc = OMP_CLAUSE_LOCATION (c);
8344 if (TREE_CODE (t) == TREE_LIST
8345 && TREE_PURPOSE (t)
8346 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8348 if (TREE_PURPOSE (t) != last_iter)
8350 tree tcnt = size_one_node;
8351 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8353 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8354 is_gimple_val, fb_rvalue) == GS_ERROR
8355 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8356 is_gimple_val, fb_rvalue) == GS_ERROR
8357 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8358 is_gimple_val, fb_rvalue) == GS_ERROR
8359 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8360 is_gimple_val, fb_rvalue)
8361 == GS_ERROR))
8362 return 2;
8363 tree var = TREE_VEC_ELT (it, 0);
8364 tree begin = TREE_VEC_ELT (it, 1);
8365 tree end = TREE_VEC_ELT (it, 2);
8366 tree step = TREE_VEC_ELT (it, 3);
8367 tree orig_step = TREE_VEC_ELT (it, 4);
8368 tree type = TREE_TYPE (var);
8369 tree stype = TREE_TYPE (step);
8370 location_t loc = DECL_SOURCE_LOCATION (var);
8371 tree endmbegin;
8372 /* Compute count for this iterator as
8373 orig_step > 0
8374 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8375 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8376 and compute product of those for the entire depend
8377 clause. */
8378 if (POINTER_TYPE_P (type))
8379 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8380 stype, end, begin);
8381 else
8382 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8383 end, begin);
8384 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8385 step,
8386 build_int_cst (stype, 1));
8387 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8388 build_int_cst (stype, 1));
8389 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8390 unshare_expr (endmbegin),
8391 stepm1);
8392 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8393 pos, step);
8394 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8395 endmbegin, stepp1);
8396 if (TYPE_UNSIGNED (stype))
8398 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8399 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8401 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8402 neg, step);
8403 step = NULL_TREE;
8404 tree cond = fold_build2_loc (loc, LT_EXPR,
8405 boolean_type_node,
8406 begin, end);
8407 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8408 build_int_cst (stype, 0));
8409 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8410 end, begin);
8411 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8412 build_int_cst (stype, 0));
8413 tree osteptype = TREE_TYPE (orig_step);
8414 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8415 orig_step,
8416 build_int_cst (osteptype, 0));
8417 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8418 cond, pos, neg);
8419 cnt = fold_convert_loc (loc, sizetype, cnt);
8420 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8421 fb_rvalue) == GS_ERROR)
8422 return 2;
8423 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8425 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8426 fb_rvalue) == GS_ERROR)
8427 return 2;
8428 last_iter = TREE_PURPOSE (t);
8429 last_count = tcnt;
8431 if (counts[i] == NULL_TREE)
8432 counts[i] = last_count;
8433 else
8434 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8435 PLUS_EXPR, counts[i], last_count);
8437 else
8438 n[i]++;
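/* If no depend clause used an iterator, there is nothing to lower.  */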
8440 for (i = 0; i < 5; i++)
8441 if (counts[i])
8442 break;
8443 if (i == 5)
8444 return 0;
8446 tree total = size_zero_node;
8447 for (i = 0; i < 5; i++)
8449 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8450 if (counts[i] == NULL_TREE)
8451 counts[i] = size_zero_node;
8452 if (n[i])
8453 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8454 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8455 fb_rvalue) == GS_ERROR)
8456 return 2;
8457 total = size_binop (PLUS_EXPR, total, counts[i]);
8460 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8461 == GS_ERROR)
8462 return 2;
8463 bool is_old = unused[1] && unused[3] && unused[4];
8464 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8465 size_int (is_old ? 1 : 4));
8466 if (!unused[4])
8467 totalpx = size_binop (PLUS_EXPR, totalpx,
8468 size_binop (MULT_EXPR, counts[4], size_int (2)));
8469 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8470 tree array = create_tmp_var_raw (type);
8471 TREE_ADDRESSABLE (array) = 1;
8472 if (!poly_int_tree_p (totalpx))
8474 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8475 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8476 if (gimplify_omp_ctxp)
8478 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8479 while (ctx
8480 && (ctx->region_type == ORT_WORKSHARE
8481 || ctx->region_type == ORT_TASKGROUP
8482 || ctx->region_type == ORT_SIMD
8483 || ctx->region_type == ORT_ACC))
8484 ctx = ctx->outer_context;
8485 if (ctx)
8486 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8488 gimplify_vla_decl (array, pre_p);
8490 else
8491 gimple_add_tmp_var (array);
8492 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8493 NULL_TREE);
8494 tree tem;
8495 if (!is_old)
8497 tem = build2 (MODIFY_EXPR, void_type_node, r,
8498 build_int_cst (ptr_type_node, 0));
8499 gimplify_and_add (tem, pre_p);
8500 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8501 NULL_TREE);
8503 tem = build2 (MODIFY_EXPR, void_type_node, r,
8504 fold_convert (ptr_type_node, total));
8505 gimplify_and_add (tem, pre_p);
8506 for (i = 1; i < (is_old ? 2 : 4); i++)
8508 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8509 NULL_TREE, NULL_TREE);
8510 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8511 gimplify_and_add (tem, pre_p);
8514 tree cnts[6];
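/* Set J to one past the last depend kind actually used.  */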
8515 for (j = 5; j; j--)
8516 if (!unused[j - 1])
8517 break;
8518 for (i = 0; i < 5; i++)
8520 if (i && (i >= j || unused[i - 1]))
8522 cnts[i] = cnts[i - 1];
8523 continue;
8525 cnts[i] = create_tmp_var (sizetype);
8526 if (i == 0)
8527 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8528 else
8530 tree t;
8531 if (is_old)
8532 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8533 else
8534 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8535 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8536 == GS_ERROR)
8537 return 2;
8538 g = gimple_build_assign (cnts[i], t);
8540 gimple_seq_add_stmt (pre_p, g);
8542 if (unused[4])
8543 cnts[5] = NULL_TREE;
8544 else
8546 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8547 cnts[5] = create_tmp_var (sizetype);
8548 g = gimple_build_assign (cnts[5], t);
8549 gimple_seq_add_stmt (pre_p, g);
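/* Second pass over the clauses: store the address of each depend
   target into the array.  */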
8552 last_iter = NULL_TREE;
8553 tree last_bind = NULL_TREE;
8554 tree *last_body = NULL;
8555 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8556 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8558 switch (OMP_CLAUSE_DEPEND_KIND (c))
8560 case OMP_CLAUSE_DEPEND_IN:
8561 i = 2;
8562 break;
8563 case OMP_CLAUSE_DEPEND_OUT:
8564 case OMP_CLAUSE_DEPEND_INOUT:
8565 i = 0;
8566 break;
8567 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8568 i = 1;
8569 break;
8570 case OMP_CLAUSE_DEPEND_DEPOBJ:
8571 i = 3;
8572 break;
8573 case OMP_CLAUSE_DEPEND_INOUTSET:
8574 i = 4;
8575 break;
8576 default:
8577 gcc_unreachable ();
8579 tree t = OMP_CLAUSE_DECL (c);
8580 if (TREE_CODE (t) == TREE_LIST
8581 && TREE_PURPOSE (t)
8582 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8584 if (TREE_PURPOSE (t) != last_iter)
8586 if (last_bind)
8587 gimplify_and_add (last_bind, pre_p);
8588 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8589 last_bind = build3 (BIND_EXPR, void_type_node,
8590 BLOCK_VARS (block), NULL, block);
8591 TREE_SIDE_EFFECTS (last_bind) = 1;
8592 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8593 tree *p = &BIND_EXPR_BODY (last_bind);
8594 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8596 tree var = TREE_VEC_ELT (it, 0);
8597 tree begin = TREE_VEC_ELT (it, 1);
8598 tree end = TREE_VEC_ELT (it, 2);
8599 tree step = TREE_VEC_ELT (it, 3);
8600 tree orig_step = TREE_VEC_ELT (it, 4);
8601 tree type = TREE_TYPE (var);
8602 location_t loc = DECL_SOURCE_LOCATION (var);
8603 /* Emit:
8604 var = begin;
8605 goto cond_label;
8606 beg_label:
8607 ...
8608 var = var + step;
8609 cond_label:
8610 if (orig_step > 0) {
8611 if (var < end) goto beg_label;
8612 } else {
8613 if (var > end) goto beg_label;
8614 }
8615 for each iterator, with inner iterators added to
8616 the ... above. */
8617 tree beg_label = create_artificial_label (loc);
8618 tree cond_label = NULL_TREE;
8619 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8620 var, begin);
8621 append_to_statement_list_force (tem, p);
8622 tem = build_and_jump (&cond_label);
8623 append_to_statement_list_force (tem, p);
8624 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8625 append_to_statement_list (tem, p);
8626 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8627 NULL_TREE, NULL_TREE);
8628 TREE_SIDE_EFFECTS (bind) = 1;
8629 SET_EXPR_LOCATION (bind, loc);
8630 append_to_statement_list_force (bind, p);
8631 if (POINTER_TYPE_P (type))
8632 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8633 var, fold_convert_loc (loc, sizetype,
8634 step));
8635 else
8636 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8637 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8638 var, tem);
8639 append_to_statement_list_force (tem, p);
8640 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8641 append_to_statement_list (tem, p);
8642 tree cond = fold_build2_loc (loc, LT_EXPR,
8643 boolean_type_node,
8644 var, end);
8645 tree pos
8646 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8647 cond, build_and_jump (&beg_label),
8648 void_node);
8649 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8650 var, end);
8651 tree neg
8652 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8653 cond, build_and_jump (&beg_label),
8654 void_node);
8655 tree osteptype = TREE_TYPE (orig_step);
8656 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8657 orig_step,
8658 build_int_cst (osteptype, 0));
8659 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8660 cond, pos, neg);
8661 append_to_statement_list_force (tem, p);
8662 p = &BIND_EXPR_BODY (bind);
8664 last_body = p;
8666 last_iter = TREE_PURPOSE (t);
8667 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8669 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8670 0), last_body);
8671 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8673 if (error_operand_p (TREE_VALUE (t)))
8674 return 2;
8675 if (TREE_VALUE (t) != null_pointer_node)
8676 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8677 if (i == 4)
8679 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8680 NULL_TREE, NULL_TREE);
8681 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8682 NULL_TREE, NULL_TREE);
8683 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8684 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8685 void_type_node, r, r2);
8686 append_to_statement_list_force (tem, last_body);
8687 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8688 void_type_node, cnts[i],
8689 size_binop (PLUS_EXPR, cnts[i],
8690 size_int (1)));
8691 append_to_statement_list_force (tem, last_body);
8692 i = 5;
8694 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8695 NULL_TREE, NULL_TREE);
8696 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8697 void_type_node, r, TREE_VALUE (t));
8698 append_to_statement_list_force (tem, last_body);
8699 if (i == 5)
8701 r = build4 (ARRAY_REF, ptr_type_node, array,
8702 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8703 NULL_TREE, NULL_TREE);
8704 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8705 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8706 void_type_node, r, tem);
8707 append_to_statement_list_force (tem, last_body);
8709 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8710 void_type_node, cnts[i],
8711 size_binop (PLUS_EXPR, cnts[i],
8712 size_int (1 + (i == 5))));
8713 append_to_statement_list_force (tem, last_body);
8714 TREE_VALUE (t) = null_pointer_node;
8716 else
8718 if (last_bind)
8720 gimplify_and_add (last_bind, pre_p);
8721 last_bind = NULL_TREE;
8723 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8725 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8726 NULL, is_gimple_val, fb_rvalue);
8727 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8729 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8730 return 2;
8731 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8732 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8733 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8734 is_gimple_val, fb_rvalue) == GS_ERROR)
8735 return 2;
8736 if (i == 4)
8738 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8739 NULL_TREE, NULL_TREE);
8740 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8741 NULL_TREE, NULL_TREE);
8742 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8743 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8744 gimplify_and_add (tem, pre_p);
8745 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8746 cnts[i],
8747 size_int (1)));
8748 gimple_seq_add_stmt (pre_p, g);
8749 i = 5;
8751 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8752 NULL_TREE, NULL_TREE);
8753 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8754 gimplify_and_add (tem, pre_p);
8755 if (i == 5)
8757 r = build4 (ARRAY_REF, ptr_type_node, array,
8758 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8759 NULL_TREE, NULL_TREE);
8760 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8761 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8763 gimplify_and_add (tem, pre_p);
8765 g = gimple_build_assign (cnts[i],
8766 size_binop (PLUS_EXPR, cnts[i],
8767 size_int (1 + (i == 5))));
8768 gimple_seq_add_stmt (pre_p, g);
8771 if (last_bind)
8772 gimplify_and_add (last_bind, pre_p);
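/* Sanity check: trap at run time if the number of addresses stored
   does not match the precomputed counts.  */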
8773 tree cond = boolean_false_node;
8774 if (is_old)
8776 if (!unused[0])
8777 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8778 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8779 size_int (2)));
8780 if (!unused[2])
8781 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8782 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8783 cnts[2],
8784 size_binop_loc (first_loc, PLUS_EXPR,
8785 totalpx,
8786 size_int (1))));
8788 else
8790 tree prev = size_int (5);
8791 for (i = 0; i < 5; i++)
8793 if (unused[i])
8794 continue;
8795 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8796 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8797 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8798 cnts[i], unshare_expr (prev)));
8801 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8802 build_call_expr_loc (first_loc,
8803 builtin_decl_explicit (BUILT_IN_TRAP),
8804 0), void_node);
8805 gimplify_and_add (tem, pre_p);
8806 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8807 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8808 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8809 OMP_CLAUSE_CHAIN (c) = *list_p;
8810 *list_p = c;
8811 return 1;
8814 /* For a set of mappings describing an array section pointed to by a struct
8815 (or derived type, etc.) component, create an "alloc" or "release" node to
8816 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8817 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8818 be created that is inserted into the list of mapping nodes attached to the
8819 directive being processed -- not part of the sorted list of nodes after
8820 GOMP_MAP_STRUCT.
8822 CODE is the code of the directive being processed. GRP_START and GRP_END
8823 are the first and last of two or three nodes representing this array section
8824 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8825 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8826 filled with the additional node described above, if needed.
8828 This function does not add the new nodes to any lists itself. It is the
8829 responsibility of the caller to do that. */
8831 static tree
8832 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8833 tree *extra_node)
8835 enum gomp_map_kind mkind
8836 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8837 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8839 gcc_assert (grp_start != grp_end);
8841 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8842 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8843 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8844 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8845 tree grp_mid = NULL_TREE;
8846 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8847 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8849 if (grp_mid
8850 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8851 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8852 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8853 else
8854 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8856 if (grp_mid
8857 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8858 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
8859 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
8861 tree c3
8862 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8863 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8864 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
8865 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8866 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
8868 *extra_node = c3;
8870 else
8871 *extra_node = NULL_TREE;
8873 return c2;
8876 /* Strip ARRAY_REFs and an indirect ref (if any) off BASE, find the
8877 containing object, and set *BITPOSP to the bit offset and *POFFSETP
8878 to the byte offset of the access. */
8883 static tree
8884 extract_base_bit_offset (tree base, poly_int64 *bitposp,
8885 poly_offset_int *poffsetp)
8887 tree offset;
8888 poly_int64 bitsize, bitpos;
8889 machine_mode mode;
8890 int unsignedp, reversep, volatilep = 0;
8891 poly_offset_int poffset;
8893 STRIP_NOPS (base);
8895 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8896 &unsignedp, &reversep, &volatilep);
8898 STRIP_NOPS (base);
8900 if (offset && poly_int_tree_p (offset))
8902 poffset = wi::to_poly_offset (offset);
8903 offset = NULL_TREE;
8905 else
8906 poffset = 0;
8908 if (maybe_ne (bitpos, 0))
8909 poffset += bits_to_bytes_round_down (bitpos);
8911 *bitposp = bitpos;
8912 *poffsetp = poffset;
8914 return base;
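/* E.g. for a hypothetical access s.x.y, the function above returns S
   and sets *POFFSETP to the byte offset of Y within it.  */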
8917 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8918 started processing the group yet. The TEMPORARY mark is used when we first
8919 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8920 when we have processed all the group's children (i.e. all the base pointers
8921 referred to by the group's mapping nodes, recursively). */
8923 enum omp_tsort_mark {
8924 UNVISITED,
8925 TEMPORARY,
8926 PERMANENT
8929 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
8930 clause. */
8932 struct omp_mapping_group {
8933 tree *grp_start;
8934 tree grp_end;
8935 omp_tsort_mark mark;
8936 /* If we've removed the group but need to reindex, mark the group as
8937 deleted. */
8938 bool deleted;
8939 struct omp_mapping_group *sibling;
8940 struct omp_mapping_group *next;
8943 DEBUG_FUNCTION void
8944 debug_mapping_group (omp_mapping_group *grp)
8946 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
8947 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
8948 debug_generic_expr (*grp->grp_start);
8949 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
8952 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
8953 isn't one. */
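/* E.g. for (*p).a[i] or p->x the base pointer is P, whereas a plain
   s.a[i] involves no indirection and yields NULL_TREE.  */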
8955 static tree
8956 omp_get_base_pointer (tree expr)
8958 while (TREE_CODE (expr) == ARRAY_REF
8959 || TREE_CODE (expr) == COMPONENT_REF)
8960 expr = TREE_OPERAND (expr, 0);
8962 if (TREE_CODE (expr) == INDIRECT_REF
8963 || (TREE_CODE (expr) == MEM_REF
8964 && integer_zerop (TREE_OPERAND (expr, 1))))
8966 expr = TREE_OPERAND (expr, 0);
8967 while (TREE_CODE (expr) == COMPOUND_EXPR)
8968 expr = TREE_OPERAND (expr, 1);
8969 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
8970 expr = TREE_OPERAND (expr, 0);
8971 if (TREE_CODE (expr) == SAVE_EXPR)
8972 expr = TREE_OPERAND (expr, 0);
8973 STRIP_NOPS (expr);
8974 return expr;
8977 return NULL_TREE;
8980 /* Remove COMPONENT_REFS and indirections from EXPR. */
8982 static tree
8983 omp_strip_components_and_deref (tree expr)
8985 while (TREE_CODE (expr) == COMPONENT_REF
8986 || TREE_CODE (expr) == INDIRECT_REF
8987 || (TREE_CODE (expr) == MEM_REF
8988 && integer_zerop (TREE_OPERAND (expr, 1)))
8989 || TREE_CODE (expr) == POINTER_PLUS_EXPR
8990 || TREE_CODE (expr) == COMPOUND_EXPR)
8991 if (TREE_CODE (expr) == COMPOUND_EXPR)
8992 expr = TREE_OPERAND (expr, 1);
8993 else
8994 expr = TREE_OPERAND (expr, 0);
8996 STRIP_NOPS (expr);
8998 return expr;
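/* Remove any outermost INDIRECT_REFs / zero-offset MEM_REFs from EXPR.  */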
9001 static tree
9002 omp_strip_indirections (tree expr)
9004 while (TREE_CODE (expr) == INDIRECT_REF
9005 || (TREE_CODE (expr) == MEM_REF
9006 && integer_zerop (TREE_OPERAND (expr, 1))))
9007 expr = TREE_OPERAND (expr, 0);
9009 return expr;
9012 /* An attach or detach operation depends directly on the address being
9013 attached/detached. Return that address, or none if there are no
9014 attachments/detachments. */
9016 static tree
9017 omp_get_attachment (omp_mapping_group *grp)
9019 tree node = *grp->grp_start;
9021 switch (OMP_CLAUSE_MAP_KIND (node))
9023 case GOMP_MAP_TO:
9024 case GOMP_MAP_FROM:
9025 case GOMP_MAP_TOFROM:
9026 case GOMP_MAP_ALWAYS_FROM:
9027 case GOMP_MAP_ALWAYS_TO:
9028 case GOMP_MAP_ALWAYS_TOFROM:
9029 case GOMP_MAP_FORCE_FROM:
9030 case GOMP_MAP_FORCE_TO:
9031 case GOMP_MAP_FORCE_TOFROM:
9032 case GOMP_MAP_FORCE_PRESENT:
9033 case GOMP_MAP_ALLOC:
9034 case GOMP_MAP_RELEASE:
9035 case GOMP_MAP_DELETE:
9036 case GOMP_MAP_FORCE_ALLOC:
9037 if (node == grp->grp_end)
9038 return NULL_TREE;
9040 node = OMP_CLAUSE_CHAIN (node);
9041 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9043 gcc_assert (node != grp->grp_end);
9044 node = OMP_CLAUSE_CHAIN (node);
9046 if (node)
9047 switch (OMP_CLAUSE_MAP_KIND (node))
9049 case GOMP_MAP_POINTER:
9050 case GOMP_MAP_ALWAYS_POINTER:
9051 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9052 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9053 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9054 return NULL_TREE;
9056 case GOMP_MAP_ATTACH_DETACH:
9057 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9058 return OMP_CLAUSE_DECL (node);
9060 default:
9061 internal_error ("unexpected mapping node");
9063 return error_mark_node;
9065 case GOMP_MAP_TO_PSET:
9066 gcc_assert (node != grp->grp_end);
9067 node = OMP_CLAUSE_CHAIN (node);
9068 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9069 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9070 return OMP_CLAUSE_DECL (node);
9071 else
9072 internal_error ("unexpected mapping node");
9073 return error_mark_node;
9075 case GOMP_MAP_ATTACH:
9076 case GOMP_MAP_DETACH:
9077 node = OMP_CLAUSE_CHAIN (node);
9078 if (!node || *grp->grp_start == grp->grp_end)
9079 return OMP_CLAUSE_DECL (*grp->grp_start);
9080 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9081 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9082 return OMP_CLAUSE_DECL (*grp->grp_start);
9083 else
9084 internal_error ("unexpected mapping node");
9085 return error_mark_node;
9087 case GOMP_MAP_STRUCT:
9088 case GOMP_MAP_FORCE_DEVICEPTR:
9089 case GOMP_MAP_DEVICE_RESIDENT:
9090 case GOMP_MAP_LINK:
9091 case GOMP_MAP_IF_PRESENT:
9092 case GOMP_MAP_FIRSTPRIVATE:
9093 case GOMP_MAP_FIRSTPRIVATE_INT:
9094 case GOMP_MAP_USE_DEVICE_PTR:
9095 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9096 return NULL_TREE;
9098 default:
9099 internal_error ("unexpected mapping node");
9102 return error_mark_node;
/* Given a pointer START_P to the start of a group of related (e.g. pointer)
   mappings, return the chain pointer to the end of that group in the list.  */

static tree *
omp_group_last (tree *start_p)
{
  tree c = *start_p, nc, *grp_last_p = start_p;

  gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

  nc = OMP_CLAUSE_CHAIN (c);

  if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
    return grp_last_p;

  switch (OMP_CLAUSE_MAP_KIND (c))
    {
    default:
      while (nc
	     && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	     && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
	{
	  grp_last_p = &OMP_CLAUSE_CHAIN (c);
	  c = nc;
	  tree nc2 = OMP_CLAUSE_CHAIN (nc);
	  if (nc2
	      && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (nc)
		  == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
	      && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
	    {
	      grp_last_p = &OMP_CLAUSE_CHAIN (nc);
	      c = nc2;
	      nc2 = OMP_CLAUSE_CHAIN (nc2);
	    }
	  nc = nc2;
	}
      break;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      /* This is a weird artifact of how directives are parsed: bare attach or
	 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
	 FIRSTPRIVATE_REFERENCE node.  FIXME.  */
      if (nc
	  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_TO_PSET:
      if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_STRUCT:
      {
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
	if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
	for (unsigned i = 0; i < num_mappings; i++)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
      }
      break;
    }

  return grp_last_p;
}

/* Walk through LIST_P, and return a list of groups of mappings found (e.g.
   OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
   associated GOMP_MAP_POINTER mappings).  Return a vector of omp_mapping_group
   if we have at least one such group, else return NULL.  */

static void
omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
			     tree gather_sentinel)
{
  for (tree *cp = list_p;
       *cp && *cp != gather_sentinel;
       cp = &OMP_CLAUSE_CHAIN (*cp))
    {
      if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
	continue;

      tree *grp_last_p = omp_group_last (cp);
      omp_mapping_group grp;

      grp.grp_start = cp;
      grp.grp_end = *grp_last_p;
      grp.mark = UNVISITED;
      grp.sibling = NULL;
      grp.deleted = false;
      grp.next = NULL;
      groups->safe_push (grp);

      cp = grp_last_p;
    }
}

static vec<omp_mapping_group> *
omp_gather_mapping_groups (tree *list_p)
{
  vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();

  omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);

  if (groups->length () > 0)
    return groups;
  else
    {
      delete groups;
      return NULL;
    }
}
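/* A sketch of the caller's protocol (cf. gimplify_scan_omp_clauses below):
   the returned vector, and any index built over it, are owned by the caller:

     vec<omp_mapping_group> *groups = omp_gather_mapping_groups (list_p);
     if (groups)
       {
	 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
	   = omp_index_mapping_groups (groups);
	 ...
	 delete grpmap;
	 delete groups;
       }
*/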
/* A pointer mapping group GRP may define a block of memory starting at some
   base address, and maybe also define a firstprivate pointer or firstprivate
   reference that points to that block.  The return value is a node containing
   the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
   If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
   return the number of consecutive chained nodes in CHAINED.  */

static tree
omp_group_base (omp_mapping_group *grp, unsigned int *chained,
		tree *firstprivate)
{
  tree node = *grp->grp_start;

  *firstprivate = NULL_TREE;
  *chained = 1;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
    case GOMP_MAP_IF_PRESENT:
      if (node == grp->grp_end)
	return node;

      node = OMP_CLAUSE_CHAIN (node);
      if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
	{
	  if (node == grp->grp_end)
	    return *grp->grp_start;
	  node = OMP_CLAUSE_CHAIN (node);
	}
      if (node)
	switch (OMP_CLAUSE_MAP_KIND (node))
	  {
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    return *grp->grp_start;

	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_ATTACH_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	    return *grp->grp_start;

	  default:
	    internal_error ("unexpected mapping node");
	  }
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return NULL_TREE;
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return NULL_TREE;
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	{
	  /* We're mapping the base pointer itself in a bare attach or detach
	     node.  This is a side effect of how parsing works, and the mapping
	     will be removed anyway (at least for enter/exit data directives).
	     We should ignore the mapping here.  FIXME.  */
	  return NULL_TREE;
	}
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
      {
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
	node = OMP_CLAUSE_CHAIN (node);
	if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	  {
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    node = OMP_CLAUSE_CHAIN (node);
	  }
	*chained = num_mappings;
	return node;
      }

    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    case GOMP_MAP_FIRSTPRIVATE_POINTER:
    case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_ALWAYS_POINTER:
    case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
      /* These shouldn't appear by themselves.  */
      if (!seen_error ())
	internal_error ("unexpected pointer mapping node");
      return error_mark_node;

    default:
      gcc_unreachable ();
    }

  return error_mark_node;
}
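/* For example (a sketch): for a group of the form

     GOMP_MAP_TO (*p, size n)
     GOMP_MAP_FIRSTPRIVATE_POINTER (p)

   omp_group_base returns the GOMP_MAP_TO node with *CHAINED == 1 and
   *FIRSTPRIVATE set to "p"; for a GOMP_MAP_STRUCT group, the first member
   mapping is returned and *CHAINED is the member count taken from the struct
   node's size operand.  */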
/* Given a vector of omp_mapping_groups, build a hash table so we can look up
   nodes by tree_operand_hash.  */

static void
omp_index_mapping_groups_1 (hash_map<tree_operand_hash,
			    omp_mapping_group *> *grpmap,
			    vec<omp_mapping_group> *groups,
			    tree reindex_sentinel)
{
  omp_mapping_group *grp;
  unsigned int i;
  bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (reindexing && *grp->grp_start == reindex_sentinel)
	above_hwm = true;

      if (reindexing && !above_hwm)
	continue;

      tree fpp;
      unsigned int chained;
      tree node = omp_group_base (grp, &chained, &fpp);

      if (node == error_mark_node || (!node && !fpp))
	continue;

      for (unsigned j = 0;
	   node && j < chained;
	   node = OMP_CLAUSE_CHAIN (node), j++)
	{
	  tree decl = OMP_CLAUSE_DECL (node);

	  /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
	     meaning node-hash lookups don't work.  This is a workaround for
	     that, but ideally we should just create the INDIRECT_REF at
	     source instead.  FIXME.  */
	  if (TREE_CODE (decl) == MEM_REF
	      && integer_zerop (TREE_OPERAND (decl, 1)))
	    decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));

	  omp_mapping_group **prev = grpmap->get (decl);

	  if (prev && *prev == grp)
	    /* Empty.  */;
	  else if (prev)
	    {
	      /* Mapping the same thing twice is normally diagnosed as an
		 error, but can happen under some circumstances, e.g. in
		 pr99928-16.c, the directive:

		 #pragma omp target simd reduction(+:a[:3]) \
			    map(always, tofrom: a[:6])

		 will result in two "a[0]" mappings (of different sizes).  */

	      grp->sibling = (*prev)->sibling;
	      (*prev)->sibling = grp;
	    }
	  else
	    grpmap->put (decl, grp);
	}

      if (!fpp)
	continue;

      omp_mapping_group **prev = grpmap->get (fpp);
      if (prev && *prev != grp)
	{
	  grp->sibling = (*prev)->sibling;
	  (*prev)->sibling = grp;
	}
      else
	grpmap->put (fpp, grp);
    }
}

static hash_map<tree_operand_hash, omp_mapping_group *> *
omp_index_mapping_groups (vec<omp_mapping_group> *groups)
{
  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash, omp_mapping_group *>;

  omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);

  return grpmap;
}
/* Rebuild group map from partially-processed clause list (during
   omp_build_struct_sibling_lists).  We have already processed nodes up until
   a high-water mark (HWM).  This is a bit tricky because the list is being
   reordered as it is scanned, but we know:

   1. The list after HWM has not been touched yet, so we can reindex it safely.

   2. The list before and including HWM has been altered, but remains
      well-formed throughout the sibling-list building operation.

   So we can do the reindex operation in two parts, on the processed and
   then the unprocessed halves of the list.  */

static hash_map<tree_operand_hash, omp_mapping_group *> *
omp_reindex_mapping_groups (tree *list_p,
			    vec<omp_mapping_group> *groups,
			    vec<omp_mapping_group> *processed_groups,
			    tree sentinel)
{
  hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
    = new hash_map<tree_operand_hash, omp_mapping_group *>;

  processed_groups->truncate (0);

  omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
  omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
  if (sentinel)
    omp_index_mapping_groups_1 (grpmap, groups, sentinel);

  return grpmap;
}
/* Find the immediately-containing struct for a component ref (etc.)
   expression EXPR.  */

static tree
omp_containing_struct (tree expr)
{
  tree expr0 = expr;

  STRIP_NOPS (expr);

  /* Note: don't strip NOPs unless we're also stripping off array refs or a
     component ref.  */
  if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
    return expr0;

  while (TREE_CODE (expr) == ARRAY_REF)
    expr = TREE_OPERAND (expr, 0);

  if (TREE_CODE (expr) == COMPONENT_REF)
    expr = TREE_OPERAND (expr, 0);

  return expr;
}
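/* E.g. for "x.a.b[3]" this returns "x.a"; for a bare decl "x" (or any
   expression that is neither an array nor a component reference) the
   original expression is returned unchanged.  */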
/* Return TRUE if DECL describes a component that is part of a whole structure
   that is mapped elsewhere in GRPMAP.  *MAPPED_BY_GROUP is set to the group
   that maps that structure, if present.  */

static bool
omp_mapped_by_containing_struct (hash_map<tree_operand_hash,
				 omp_mapping_group *> *grpmap,
				 tree decl,
				 omp_mapping_group **mapped_by_group)
{
  tree wsdecl = NULL_TREE;

  *mapped_by_group = NULL;

  while (true)
    {
      wsdecl = omp_containing_struct (decl);
      if (wsdecl == decl)
	break;
      omp_mapping_group **wholestruct = grpmap->get (wsdecl);
      if (!wholestruct
	  && TREE_CODE (wsdecl) == MEM_REF
	  && integer_zerop (TREE_OPERAND (wsdecl, 1)))
	{
	  tree deref = TREE_OPERAND (wsdecl, 0);
	  deref = build_fold_indirect_ref (deref);
	  wholestruct = grpmap->get (deref);
	}
      if (wholestruct)
	{
	  *mapped_by_group = *wholestruct;
	  return true;
	}
      decl = wsdecl;
    }

  return false;
}
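/* For instance, if GRPMAP contains a group mapping the whole of "s", then for
   DECL "s.a.b" the loop above tries "s.a" and then "s", finds the latter, and
   returns TRUE with *MAPPED_BY_GROUP pointing at the group that maps "s".  */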
/* Helper function for omp_tsort_mapping_groups.  Returns TRUE on success, or
   FALSE on error.  */

static bool
omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
			    vec<omp_mapping_group> *groups,
			    hash_map<tree_operand_hash, omp_mapping_group *>
			      *grpmap,
			    omp_mapping_group *grp)
{
  if (grp->mark == PERMANENT)
    return true;
  if (grp->mark == TEMPORARY)
    {
      fprintf (stderr, "when processing group:\n");
      debug_mapping_group (grp);
      internal_error ("base pointer cycle detected");
      return false;
    }
  grp->mark = TEMPORARY;

  tree attaches_to = omp_get_attachment (grp);

  if (attaches_to)
    {
      omp_mapping_group **basep = grpmap->get (attaches_to);

      if (basep && *basep != grp)
	{
	  for (omp_mapping_group *w = *basep; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	}
    }

  tree decl = OMP_CLAUSE_DECL (*grp->grp_start);

  while (decl)
    {
      tree base = omp_get_base_pointer (decl);

      if (!base)
	break;

      omp_mapping_group **innerp = grpmap->get (base);
      omp_mapping_group *wholestruct;

      /* We should treat whole-structure mappings as if all (pointer, in this
	 case) members are mapped as individual list items.  Check if we have
	 such a whole-structure mapping, if we don't have an explicit reference
	 to the pointer member itself.  */
      if (!innerp
	  && TREE_CODE (base) == COMPONENT_REF
	  && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
	innerp = &wholestruct;

      if (innerp && *innerp != grp)
	{
	  for (omp_mapping_group *w = *innerp; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	  break;
	}

      decl = base;
    }

  grp->mark = PERMANENT;

  /* Emit grp to output list.  */

  **outlist = grp;
  *outlist = &grp->next;

  return true;
}

/* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
   before mappings that use those pointers.  This is an implementation of the
   depth-first search algorithm, described e.g. at:

     https://en.wikipedia.org/wiki/Topological_sorting
*/

static omp_mapping_group *
omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
			  hash_map<tree_operand_hash, omp_mapping_group *>
			    *grpmap)
{
  omp_mapping_group *grp, *outlist = NULL, **cursor;
  unsigned int i;

  cursor = &outlist;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT)
	if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	  return NULL;
    }

  return outlist;
}
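/* For example (a sketch): if one group maps "s.ptr[0:n]" and another maps the
   whole of "s", the group for "s" is a base for the first and is emitted
   ahead of it in the output list.  The TEMPORARY/PERMANENT marks play the
   role of the usual grey/black colouring in the DFS; revisiting a TEMPORARY
   group means we have found a base-pointer cycle.  */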
/* Split INLIST into two parts, moving groups corresponding to
   ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
   The former list is then appended to the latter.  Each sub-list retains the
   order of the original list.
   Note that ATTACH nodes are later moved to the end of the list in
   gimplify_adjust_omp_clauses, for target regions.  */

static omp_mapping_group *
omp_segregate_mapping_groups (omp_mapping_group *inlist)
{
  omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
  omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;

  for (omp_mapping_group *w = inlist; w;)
    {
      tree c = *w->grp_start;
      omp_mapping_group *next = w->next;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

      switch (OMP_CLAUSE_MAP_KIND (c))
	{
	case GOMP_MAP_ALLOC:
	case GOMP_MAP_RELEASE:
	case GOMP_MAP_DELETE:
	  *ard_tail = w;
	  w->next = NULL;
	  ard_tail = &w->next;
	  break;

	default:
	  *tf_tail = w;
	  w->next = NULL;
	  tf_tail = &w->next;
	}

      w = next;
    }

  /* Now splice the lists together...  */
  *tf_tail = ard_groups;

  return tf_groups;
}
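/* E.g. an input ordering

     {to: a} {alloc: b} {from: c}

   becomes {to: a} {from: c} {alloc: b}: a stable partition, with the
   allocation-type groups appended after the data-movement groups.  */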
/* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
   those groups based on the output list of omp_tsort_mapping_groups --
   singly-linked, threaded through each element's NEXT pointer starting at
   HEAD.  Each list element appears exactly once in that linked list.

   Each element of GROUPS may correspond to one or several mapping nodes.
   Node groups are kept together, and in the reordered list, the positions of
   the original groups are reused for the positions of the reordered list.
   Hence if we have e.g.

     {to ptr ptr} firstprivate {tofrom ptr} ...
	  ^            ^            ^
	  first group  non-"map"    second group

   and say the second group contains a base pointer for the first so must be
   moved before it, the resulting list will contain:

     {tofrom ptr} firstprivate {to ptr ptr} ...
	  ^ prev. second group    ^ prev. first group
*/

static tree *
omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
			    omp_mapping_group *head,
			    tree *list_p)
{
  omp_mapping_group *grp;
  unsigned int i;
  unsigned numgroups = groups->length ();
  auto_vec<tree> old_heads (numgroups);
  auto_vec<tree *> old_headps (numgroups);
  auto_vec<tree> new_heads (numgroups);
  auto_vec<tree> old_succs (numgroups);
  bool map_at_start = (list_p == (*groups)[0].grp_start);

  tree *new_grp_tail = NULL;

  /* Stash the start & end nodes of each mapping group before we start
     modifying the list.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      old_headps.quick_push (grp->grp_start);
      old_heads.quick_push (*grp->grp_start);
      old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
    }

  /* And similarly, the heads of the groups in the order we want to rearrange
     the list to.  */
  for (omp_mapping_group *w = head; w; w = w->next)
    new_heads.quick_push (*w->grp_start);

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      gcc_assert (head);

      if (new_grp_tail && old_succs[i - 1] == old_heads[i])
	{
	  /* a {b c d} {e f g} h i j   (original)

	     a {k l m} {e f g} h i j   (inserted new group on last iter)

	     a {k l m} {n o p} h i j   (this time, chain last group to new one)
		      ^new_grp_tail
	  */
	  *new_grp_tail = new_heads[i];
	}
      else if (new_grp_tail)
	{
	  /* a {b c d} e {f g h} i j k   (original)

	     a {l m n} e {f g h} i j k   (gap after last iter's group)

	     a {l m n} e {o p q} h i j   (chain last group to old successor)
		      ^new_grp_tail
	  */
	  *new_grp_tail = old_succs[i - 1];
	  *old_headps[i] = new_heads[i];
	}
      else
	{
	  /* The first inserted group -- point to new group, and leave end
	     open.
	     a {b c d} e f

	     a {g h i...
	  */
	  *grp->grp_start = new_heads[i];
	}

      new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);

      head = head->next;
    }

  if (new_grp_tail)
    *new_grp_tail = old_succs[numgroups - 1];

  gcc_assert (!head);

  return map_at_start ? (*groups)[0].grp_start : list_p;
}
/* DECL is supposed to have lastprivate semantics in the outer contexts
   of combined/composite constructs, starting with OCTX.
   Add needed lastprivate, shared or map clause if no data sharing or
   mapping clause is present.  IMPLICIT_P is true if it is an implicit
   clause (IV on simd), in which case the lastprivate will not be
   copied to some constructs.  */

static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
					       tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  for (; octx; octx = octx->outer_context)
    {
      if ((octx->region_type == ORT_COMBINED_PARALLEL
	   || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type & ORT_TASK) != 0
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (implicit_p
	  && octx->region_type == ORT_WORKSHARE
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && octx->outer_context
	  && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
	  && splay_tree_lookup (octx->outer_context->variables,
				(splay_tree_key) decl) == NULL)
	{
	  octx = octx->outer_context;
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && !omp_check_private (octx, decl, false))
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (octx->region_type == ORT_COMBINED_TARGET)
	{
	  splay_tree_node n = splay_tree_lookup (octx->variables,
						 (splay_tree_key) decl);
	  if (n == NULL)
	    {
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	  else if (!implicit_p
		   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
	    {
	      n->value &= ~(GOVD_FIRSTPRIVATE
			    | GOVD_FIRSTPRIVATE_IMPLICIT
			    | GOVD_EXPLICIT);
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	}
      break;
    }
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}
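/* A sketch: for

     #pragma omp target parallel for lastprivate(x)

   the walk above adds an implicit "shared" entry for X on the combined
   parallel context and an implicit "map" entry on the combined target
   context, so the value written by the last iteration is visible after the
   construct.  */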
/* If we have mappings INNER and OUTER, where INNER is a component access and
   OUTER is a mapping of the whole containing struct, check that the mappings
   are compatible.  We'll be deleting the inner mapping, so we need to make
   sure the outer mapping does (at least) the same transfers to/from the device
   as the inner mapping.  */

bool
omp_check_mapping_compatibility (location_t loc,
				 omp_mapping_group *outer,
				 omp_mapping_group *inner)
{
  tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;

  gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
  gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);

  enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
  enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);

  if (outer_kind == inner_kind)
    return true;

  switch (outer_kind)
    {
    case GOMP_MAP_ALWAYS_TO:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO)
	return true;
      break;

    case GOMP_MAP_ALWAYS_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_FROM)
	return true;
      break;

    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC)
	return true;
      break;

    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_TOFROM:
      if (inner_kind == GOMP_MAP_FORCE_PRESENT
	  || inner_kind == GOMP_MAP_ALLOC
	  || inner_kind == GOMP_MAP_TO
	  || inner_kind == GOMP_MAP_FROM
	  || inner_kind == GOMP_MAP_TOFROM)
	return true;
      break;

    default:
      break;
    }

  error_at (loc, "data movement for component %qE is not compatible with "
	    "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
	    OMP_CLAUSE_DECL (first_outer));

  return false;
}
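/* For instance (roughly): an outer "tofrom" mapping of a struct subsumes an
   inner "to", "from" or "alloc" mapping of one of its components, so those
   combinations pass; an outer "to" combined with an inner "from" would lose
   the copy-back and is diagnosed instead.  */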
/* Similar to omp_resolve_clause_dependencies, but for OpenACC.  The only
   clause dependencies we handle for now are struct element mappings and
   whole-struct mappings on the same directive, and duplicate clause
   detection.  */

void
oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
				  hash_map<tree_operand_hash,
					   omp_mapping_group *> *grpmap)
{
  int i;
  omp_mapping_group *grp;
  hash_set<tree_operand_hash> *seen_components = NULL;
  hash_set<tree_operand_hash> *shown_error = NULL;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      if (DECL_P (grp_end))
	continue;

      tree c = OMP_CLAUSE_DECL (*grp->grp_start);
      while (TREE_CODE (c) == ARRAY_REF)
	c = TREE_OPERAND (c, 0);
      if (TREE_CODE (c) != COMPONENT_REF)
	continue;
      if (!seen_components)
	seen_components = new hash_set<tree_operand_hash> ();
      if (!shown_error)
	shown_error = new hash_set<tree_operand_hash> ();
      if (seen_components->contains (c)
	  && !shown_error->contains (c))
	{
	  error_at (OMP_CLAUSE_LOCATION (grp_end),
		    "%qE appears more than once in map clauses",
		    OMP_CLAUSE_DECL (grp_end));
	  shown_error->add (c);
	}
      else
	seen_components->add (c);

      omp_mapping_group *struct_group;
      if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
	  && *grp->grp_start == grp_end)
	{
	  omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
					   struct_group, grp);
	  /* Remove the whole of this mapping -- redundant.  */
	  grp->deleted = true;
	}
    }

  if (seen_components)
    delete seen_components;
  if (shown_error)
    delete shown_error;
}
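/* A sketch: on a directive such as

     #pragma acc data copy(s) copy(s.a)

   the "s.a" group is mapped by the containing "s" group, so after a
   compatibility check it is marked deleted as redundant; mapping the same
   component twice instead triggers the "appears more than once" error.  */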
/* Link node NEWNODE so it is pointed to by chain INSERT_AT.  NEWNODE's chain
   is linked to the previous node pointed to by INSERT_AT.  */

static tree *
omp_siblist_insert_node_after (tree newnode, tree *insert_at)
{
  OMP_CLAUSE_CHAIN (newnode) = *insert_at;
  *insert_at = newnode;
  return &OMP_CLAUSE_CHAIN (newnode);
}

/* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
   pointed to by chain MOVE_AFTER instead.  */

static void
omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
{
  gcc_assert (node == *old_pos);
  *old_pos = OMP_CLAUSE_CHAIN (node);
  OMP_CLAUSE_CHAIN (node) = *move_after;
  *move_after = node;
}

/* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
   LAST_NODE to after MOVE_AFTER chain.  Similar to below function, but no
   new nodes are prepended to the list before splicing into the new position.
   Return the position we should continue scanning the list at, or NULL to
   stay where we were.  */

static tree *
omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
			      tree *move_after)
{
  if (first_ptr == move_after)
    return NULL;

  tree tmp = *first_ptr;
  *first_ptr = OMP_CLAUSE_CHAIN (last_node);
  OMP_CLAUSE_CHAIN (last_node) = *move_after;
  *move_after = tmp;

  return first_ptr;
}
/* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
   [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
   pointer MOVE_AFTER.

   The latter list was previously part of the OMP clause list, and the former
   (prepended) part is comprised of new nodes.

   We start with a list of nodes starting with a struct mapping node.  We
   rearrange the list so that new nodes starting from FIRST_NEW and whose last
   node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
   the group of mapping nodes we are currently processing (from the chain
   FIRST_PTR to LAST_NODE).  The return value is the pointer to the next chain
   we should continue processing from, or NULL to stay where we were.

   The transformation (in the case where MOVE_AFTER and FIRST_PTR are
   different) is worked through below.  Here we are processing LAST_NODE, and
   FIRST_PTR points at the preceding mapping clause:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->F (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

     *last_new_tail = *first_ptr;

  I. new_node (first_new)	[->F (last_new_tail)]

     *first_ptr = OMP_CLAUSE_CHAIN (last_node)

  #. mapping node		chain
  ----------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     OMP_CLAUSE_CHAIN (last_node) = *move_after;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     *move_after = first_new;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...
  I. new_node (first_new)	[->F (last_new_tail)]

   or, in order:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  I. new_node (first_new)	[->F (last_new_tail)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  H. ...
*/

static tree *
omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
				     tree *first_ptr, tree last_node,
				     tree *move_after)
{
  tree *continue_at = NULL;
  *last_new_tail = *first_ptr;
  if (first_ptr == move_after)
    *move_after = first_new;
  else
    {
      *first_ptr = OMP_CLAUSE_CHAIN (last_node);
      continue_at = first_ptr;
      OMP_CLAUSE_CHAIN (last_node) = *move_after;
      *move_after = first_new;
    }
  return continue_at;
}
/* Mapping struct members causes an additional set of nodes to be created,
   starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
   number of members being mapped, in order of ascending position (address or
   bitwise).

   We scan through the list of mapping clauses, calling this function for each
   struct member mapping we find, and build up the list of mappings after the
   initial GOMP_MAP_STRUCT node.  For pointer members, these will be
   newly-created ALLOC nodes.  For non-pointer members, the existing mapping is
   moved into place in the sorted list.

     struct {
       int *a;
       int *b;
       int c;
       int *d;
     };

     #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
				      struct.d[0:n])

     GOMP_MAP_STRUCT (4)
     [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
     GOMP_MAP_ALLOC  (struct.a)
     GOMP_MAP_ALLOC  (struct.b)
     GOMP_MAP_TO     (struct.c)
     GOMP_MAP_ALLOC  (struct.d)
     ...

   In the case where we are mapping references to pointers, or in Fortran if
   we are mapping an array with a descriptor, additional nodes may be created
   after the struct node list also.

   The return code is either a pointer to the next node to process (if the
   list has been rearranged), else NULL to continue with the next node in the
   original list.  */

static tree *
omp_accumulate_sibling_list (enum omp_region_type region_type,
			     enum tree_code code,
			     hash_map<tree_operand_hash, tree>
			       *&struct_map_to_clause, tree *grp_start_p,
			     tree grp_end, tree *inner)
{
  poly_offset_int coffset;
  poly_int64 cbitpos;
  tree ocd = OMP_CLAUSE_DECL (grp_end);
  bool openmp = !(region_type & ORT_ACC);
  tree *continue_at = NULL;

  while (TREE_CODE (ocd) == ARRAY_REF)
    ocd = TREE_OPERAND (ocd, 0);

  if (TREE_CODE (ocd) == INDIRECT_REF)
    ocd = TREE_OPERAND (ocd, 0);

  tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);

  bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
  bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
			 == GOMP_MAP_ATTACH_DETACH)
			|| (OMP_CLAUSE_MAP_KIND (grp_end)
			    == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
  bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
		 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);

  /* FIXME: If we're not mapping the base pointer in some other clause on this
     directive, I think we want to create ALLOC/RELEASE here -- i.e. not
     early-exit.  */
  if (openmp && attach_detach)
    return NULL;

  if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
    {
      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				 OMP_CLAUSE_MAP);
      gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;

      OMP_CLAUSE_SET_MAP_KIND (l, k);

      OMP_CLAUSE_DECL (l) = unshare_expr (base);

      OMP_CLAUSE_SIZE (l)
	= (!attach ? size_int (1)
	   : (DECL_P (OMP_CLAUSE_DECL (l))
	      ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
	      : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
      if (struct_map_to_clause == NULL)
	struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
      struct_map_to_clause->put (base, l);

      if (ptr || attach_detach)
	{
	  tree extra_node;
	  tree alloc_node
	    = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
					   &extra_node);
	  OMP_CLAUSE_CHAIN (l) = alloc_node;

	  tree *insert_node_pos = grp_start_p;

	  if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	    }
	  else
	    OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;

	  *insert_node_pos = l;
	}
      else
	{
	  gcc_assert (*grp_start_p == grp_end);
	  grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
	}

      tree noind = omp_strip_indirections (base);

      if (!openmp
	  && (region_type & ORT_TARGET)
	  && TREE_CODE (noind) == COMPONENT_REF)
	{
	  /* The base for this component access is a struct component access
	     itself.  Insert a node to be processed on the next iteration of
	     our caller's loop, which will subsequently be turned into a new,
	     inner GOMP_MAP_STRUCT mapping.

	     We need to do this, else the non-DECL_P base won't be
	     rewritten correctly in the offloaded region.  */
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
	  OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
	  OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
	  *inner = c2;
	  return NULL;
	}

      tree sdecl = omp_strip_components_and_deref (base);

      if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
	{
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				      OMP_CLAUSE_MAP);
	  bool base_ref
	    = (TREE_CODE (base) == INDIRECT_REF
	       && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
		    == REFERENCE_TYPE)
		   || ((TREE_CODE (TREE_OPERAND (base, 0))
			== INDIRECT_REF)
		       && (TREE_CODE (TREE_TYPE (TREE_OPERAND
						  (TREE_OPERAND (base, 0), 0)))
			   == REFERENCE_TYPE))));
	  enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
					      : GOMP_MAP_FIRSTPRIVATE_POINTER;
	  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
	  OMP_CLAUSE_DECL (c2) = sdecl;
	  tree baddr = build_fold_addr_expr (base);
	  baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
				    ptrdiff_type_node, baddr);
	  /* This isn't going to be good enough when we add support for more
	     complicated lvalue expressions.  FIXME.  */
	  if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
	    sdecl = build_simple_mem_ref (sdecl);
	  tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
					    ptrdiff_type_node, sdecl);
	  OMP_CLAUSE_SIZE (c2)
	    = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
			       ptrdiff_type_node, baddr, decladdr);
	  /* Insert after struct node.  */
	  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
	  OMP_CLAUSE_CHAIN (l) = c2;
	}

      return NULL;
    }
  else if (struct_map_to_clause)
    {
      tree *osc = struct_map_to_clause->get (base);
      tree *sc = NULL, *scp = NULL;
      sc = &OMP_CLAUSE_CHAIN (*osc);
      /* The struct mapping might be immediately followed by a
	 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
	 indirect access or a reference, or both.  (This added node is removed
	 in omp-low.c after it has been processed there.)  */
      if (*sc != grp_end
	  && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	      || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	sc = &OMP_CLAUSE_CHAIN (*sc);
      for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
	if ((ptr || attach_detach) && sc == grp_start_p)
	  break;
	else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
	  break;
	else
	  {
	    tree sc_decl = OMP_CLAUSE_DECL (*sc);
	    poly_offset_int offset;
	    poly_int64 bitpos;

	    if (TREE_CODE (sc_decl) == ARRAY_REF)
	      {
		while (TREE_CODE (sc_decl) == ARRAY_REF)
		  sc_decl = TREE_OPERAND (sc_decl, 0);
		if (TREE_CODE (sc_decl) != COMPONENT_REF
		    || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
		  break;
	      }
	    else if (TREE_CODE (sc_decl) == INDIRECT_REF
		     && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
		     && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
			 == REFERENCE_TYPE))
	      sc_decl = TREE_OPERAND (sc_decl, 0);

	    tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
	    if (!base2 || !operand_equal_p (base2, base, 0))
	      break;
	    if (scp)
	      continue;
	    if (maybe_lt (coffset, offset)
		|| (known_eq (coffset, offset)
		    && maybe_lt (cbitpos, bitpos)))
	      {
		if (ptr || attach_detach)
		  scp = sc;
		else
		  break;
	      }
	  }

      if (!attach)
	OMP_CLAUSE_SIZE (*osc)
	  = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
      if (ptr || attach_detach)
	{
	  tree cl = NULL_TREE, extra_node;
	  tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
							 grp_end, &extra_node);
	  tree *tail_chain = NULL;

	  /* Here, we have:

	     grp_end : the last (or only) node in this group.
	     grp_start_p : pointer to the first node in a pointer mapping group
			   up to and including GRP_END.
	     sc : pointer to the chain for the end of the struct component
		  list.
	     scp : pointer to the chain for the sorted position at which we
		   should insert in the middle of the struct component list
		   (else NULL to insert at end).
	     alloc_node : the "alloc" node for the structure (pointer-type)
			  component.  We insert at SCP (if present), else SC
			  (the end of the struct component list).
	     extra_node : a newly-synthesized node for an additional indirect
			  pointer mapping or a Fortran pointer set, if needed.
	     cl : first node to prepend before grp_start_p.
	     tail_chain : pointer to chain of last prepended node.

	     The general idea is we move the nodes for this struct mapping
	     together: the alloc node goes into the sorted list directly after
	     the struct mapping, and any extra nodes (together with the nodes
	     mapping arrays pointed to by struct components) get moved after
	     that list.  When SCP is NULL, we insert the nodes at SC, i.e. at
	     the end of the struct component mapping list.  It's important that
	     the alloc_node comes first in that case because it's part of the
	     sorted component mapping list (but subsequent nodes are not!).  */

	  if (scp)
	    omp_siblist_insert_node_after (alloc_node, scp);

	  /* Make [cl,tail_chain] a list of the alloc node (if we haven't
	     already inserted it) and the extra_node (if it is present).  The
	     list can be empty if we added alloc_node above and there is no
	     extra node.  */
	  if (scp && extra_node)
	    {
	      cl = extra_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (!scp)
	    {
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
	    }

	  continue_at
	    = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
							grp_start_p, grp_end,
							sc)
		 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
	}
      else if (*sc != grp_end)
	{
	  gcc_assert (*grp_start_p == grp_end);

	  /* We are moving the current node back to a previous struct node:
	     the node that used to point to the current node will now point to
	     the next node.  */
	  continue_at = grp_start_p;
	  /* In the non-pointer case, the mapping clause itself is moved into
	     the correct position in the struct component list, which in this
	     case is just SC.  */
	  omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
	}
    }
  return continue_at;
}
/* Scan through GROUPS, and create sorted structure sibling lists without
   gimplifying.  */

static bool
omp_build_struct_sibling_lists (enum tree_code code,
				enum omp_region_type region_type,
				vec<omp_mapping_group> *groups,
				hash_map<tree_operand_hash, omp_mapping_group *>
				  **grpmap,
				tree *list_p)
{
  unsigned i;
  omp_mapping_group *grp;
  hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
  bool success = true;
  tree *new_next = NULL;
  tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
  auto_vec<omp_mapping_group> pre_hwm_groups;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree c = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (c);
      tree grp_end = grp->grp_end;
      tree sentinel = OMP_CLAUSE_CHAIN (grp_end);

      if (new_next)
	grp->grp_start = new_next;

      new_next = NULL;

      tree *grp_start_p = grp->grp_start;

      if (DECL_P (decl))
	continue;

      /* Skip groups we marked for deletion in
	 oacc_resolve_clause_dependencies.  */
      if (grp->deleted)
	continue;

      if (OMP_CLAUSE_CHAIN (*grp_start_p)
	  && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
	{
	  /* Don't process an array descriptor that isn't inside a derived
	     type as a struct (the GOMP_MAP_POINTER following will have the
	     form "var.data", but such mappings are handled specially).  */
	  tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
	  if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
	      && DECL_P (OMP_CLAUSE_DECL (grpmid)))
	    continue;
	}

      tree d = decl;
      if (TREE_CODE (d) == ARRAY_REF)
	{
	  while (TREE_CODE (d) == ARRAY_REF)
	    d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
	    decl = d;
	}
      if (d == decl
	  && TREE_CODE (decl) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
	  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
	      == REFERENCE_TYPE)
	  && (OMP_CLAUSE_MAP_KIND (c)
	      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
	decl = TREE_OPERAND (decl, 0);

      STRIP_NOPS (decl);

      if (TREE_CODE (decl) != COMPONENT_REF)
	continue;

      /* If we're mapping the whole struct in another node, skip adding this
	 node to a sibling list.  */
      omp_mapping_group *wholestruct;
      if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
					   &wholestruct))
	{
	  if (!(region_type & ORT_ACC)
	      && *grp_start_p == grp_end)
	    /* Remove the whole of this mapping -- redundant.  */
	    grp->deleted = true;

	  continue;
	}

      if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
	  && code != OACC_UPDATE
	  && code != OMP_TARGET_UPDATE)
	{
	  if (error_operand_p (decl))
	    {
	      success = false;
	      goto error_out;
	    }

	  tree stype = TREE_TYPE (decl);
	  if (TREE_CODE (stype) == REFERENCE_TYPE)
	    stype = TREE_TYPE (stype);
	  if (TYPE_SIZE_UNIT (stype) == NULL
	      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"mapping field %qE of variable length "
			"structure", OMP_CLAUSE_DECL (c));
	      success = false;
	      goto error_out;
	    }

	  tree inner = NULL_TREE;

	  new_next
	    = omp_accumulate_sibling_list (region_type, code,
					   struct_map_to_clause, grp_start_p,
					   grp_end, &inner);

	  if (inner)
	    {
	      if (new_next && *new_next == NULL_TREE)
		*new_next = inner;
	      else
		*tail = inner;

	      OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
	      omp_mapping_group newgrp;
	      newgrp.grp_start = new_next ? new_next : tail;
	      newgrp.grp_end = inner;
	      newgrp.mark = UNVISITED;
	      newgrp.sibling = NULL;
	      newgrp.deleted = false;
	      newgrp.next = NULL;
	      groups->safe_push (newgrp);

	      /* !!! Growing GROUPS might invalidate the pointers in the group
		 map.  Rebuild it here.  This is a bit inefficient, but
		 shouldn't happen very often.  */
	      delete (*grpmap);
	      *grpmap
		= omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
					      sentinel);

	      tail = &OMP_CLAUSE_CHAIN (inner);
	    }
	}
    }

  /* Delete groups marked for deletion above.  At this point the order of the
     groups may no longer correspond to the order of the underlying list,
     which complicates this a little.  First clear out OMP_CLAUSE_DECL for
     deleted nodes...  */

  FOR_EACH_VEC_ELT (*groups, i, grp)
    if (grp->deleted)
      for (tree d = *grp->grp_start;
	   d != OMP_CLAUSE_CHAIN (grp->grp_end);
	   d = OMP_CLAUSE_CHAIN (d))
	OMP_CLAUSE_DECL (d) = NULL_TREE;

  /* ...then sweep through the list removing the now-empty nodes.  */

  tail = list_p;
  while (*tail)
    {
      if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
	  && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
	*tail = OMP_CLAUSE_CHAIN (*tail);
      else
	tail = &OMP_CLAUSE_CHAIN (*tail);
    }

error_out:
  if (struct_map_to_clause)
    delete struct_map_to_clause;

  return success;
}
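/* Putting the pieces together, the overall OpenMP flow in
   gimplify_scan_omp_clauses below is, in outline:

     gather groups -> index -> build struct sibling lists
     -> re-gather/re-index -> topological sort -> segregate
     -> reorder the clause list

   while the OpenACC path additionally resolves clause dependencies first and
   skips the sort/reorder steps.  */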
10668 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10669 and previous omp contexts. */
10671 static void
10672 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10673 enum omp_region_type region_type,
10674 enum tree_code code)
10676 struct gimplify_omp_ctx *ctx, *outer_ctx;
10677 tree c;
10678 tree *prev_list_p = NULL, *orig_list_p = list_p;
10679 int handled_depend_iterators = -1;
10680 int nowait = -1;
10682 ctx = new_omp_context (region_type);
10683 ctx->code = code;
10684 outer_ctx = ctx->outer_context;
10685 if (code == OMP_TARGET)
10687 if (!lang_GNU_Fortran ())
10688 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10689 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10690 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10691 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10693 if (!lang_GNU_Fortran ())
10694 switch (code)
10696 case OMP_TARGET:
10697 case OMP_TARGET_DATA:
10698 case OMP_TARGET_ENTER_DATA:
10699 case OMP_TARGET_EXIT_DATA:
10700 case OACC_DECLARE:
10701 case OACC_HOST_DATA:
10702 case OACC_PARALLEL:
10703 case OACC_KERNELS:
10704 ctx->target_firstprivatize_array_bases = true;
10705 default:
10706 break;
10709 if (code == OMP_TARGET
10710 || code == OMP_TARGET_DATA
10711 || code == OMP_TARGET_ENTER_DATA
10712 || code == OMP_TARGET_EXIT_DATA)
10714 vec<omp_mapping_group> *groups;
10715 groups = omp_gather_mapping_groups (list_p);
10716 if (groups)
10718 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
10719 grpmap = omp_index_mapping_groups (groups);
10721 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10722 list_p);
10724 omp_mapping_group *outlist = NULL;
10726 /* Topological sorting may fail if we have duplicate nodes, which
10727 we should have detected and shown an error for already. Skip
10728 sorting in that case. */
10729 if (seen_error ())
10730 goto failure;
10732 delete grpmap;
10733 delete groups;
10735 /* Rebuild now we have struct sibling lists. */
10736 groups = omp_gather_mapping_groups (list_p);
10737 grpmap = omp_index_mapping_groups (groups);
10739 outlist = omp_tsort_mapping_groups (groups, grpmap);
10740 outlist = omp_segregate_mapping_groups (outlist);
10741 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10743 failure:
10744 delete grpmap;
10745 delete groups;
10748 else if (region_type & ORT_ACC)
10750 vec<omp_mapping_group> *groups;
10751 groups = omp_gather_mapping_groups (list_p);
10752 if (groups)
10754 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
10755 grpmap = omp_index_mapping_groups (groups);
10757 oacc_resolve_clause_dependencies (groups, grpmap);
10758 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10759 list_p);
10761 delete groups;
10762 delete grpmap;
10766 while ((c = *list_p) != NULL)
10768 bool remove = false;
10769 bool notice_outer = true;
10770 const char *check_non_private = NULL;
10771 unsigned int flags;
10772 tree decl;
10774 switch (OMP_CLAUSE_CODE (c))
10776 case OMP_CLAUSE_PRIVATE:
10777 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10778 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10780 flags |= GOVD_PRIVATE_OUTER_REF;
10781 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
10783 else
10784 notice_outer = false;
10785 goto do_add;
10786 case OMP_CLAUSE_SHARED:
10787 flags = GOVD_SHARED | GOVD_EXPLICIT;
10788 goto do_add;
10789 case OMP_CLAUSE_FIRSTPRIVATE:
10790 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10791 check_non_private = "firstprivate";
10792 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10794 gcc_assert (code == OMP_TARGET);
10795 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
10797 goto do_add;
10798 case OMP_CLAUSE_LASTPRIVATE:
10799 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10800 switch (code)
10802 case OMP_DISTRIBUTE:
10803 error_at (OMP_CLAUSE_LOCATION (c),
10804 "conditional %<lastprivate%> clause on "
10805 "%qs construct", "distribute");
10806 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10807 break;
10808 case OMP_TASKLOOP:
10809 error_at (OMP_CLAUSE_LOCATION (c),
10810 "conditional %<lastprivate%> clause on "
10811 "%qs construct", "taskloop");
10812 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10813 break;
10814 default:
10815 break;
10817 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
10818 if (code != OMP_LOOP)
10819 check_non_private = "lastprivate";
10820 decl = OMP_CLAUSE_DECL (c);
10821 if (error_operand_p (decl))
10822 goto do_add;
10823 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
10824 && !lang_hooks.decls.omp_scalar_p (decl, true))
10826 error_at (OMP_CLAUSE_LOCATION (c),
10827 "non-scalar variable %qD in conditional "
10828 "%<lastprivate%> clause", decl);
10829 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10831 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10832 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
10833 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
10834 false);
10835 goto do_add;
10836 case OMP_CLAUSE_REDUCTION:
10837 if (OMP_CLAUSE_REDUCTION_TASK (c))
10839 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10841 if (nowait == -1)
10842 nowait = omp_find_clause (*list_p,
10843 OMP_CLAUSE_NOWAIT) != NULL_TREE;
10844 if (nowait
10845 && (outer_ctx == NULL
10846 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
10848 error_at (OMP_CLAUSE_LOCATION (c),
10849 "%<task%> reduction modifier on a construct "
10850 "with a %<nowait%> clause");
10851 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10854 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
10856 error_at (OMP_CLAUSE_LOCATION (c),
10857 "invalid %<task%> reduction modifier on construct "
10858 "other than %<parallel%>, %qs, %<sections%> or "
10859 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10860 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10863 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10864 switch (code)
10866 case OMP_SECTIONS:
10867 error_at (OMP_CLAUSE_LOCATION (c),
10868 "%<inscan%> %<reduction%> clause on "
10869 "%qs construct", "sections");
10870 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10871 break;
10872 case OMP_PARALLEL:
10873 error_at (OMP_CLAUSE_LOCATION (c),
10874 "%<inscan%> %<reduction%> clause on "
10875 "%qs construct", "parallel");
10876 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10877 break;
10878 case OMP_TEAMS:
10879 error_at (OMP_CLAUSE_LOCATION (c),
10880 "%<inscan%> %<reduction%> clause on "
10881 "%qs construct", "teams");
10882 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10883 break;
10884 case OMP_TASKLOOP:
10885 error_at (OMP_CLAUSE_LOCATION (c),
10886 "%<inscan%> %<reduction%> clause on "
10887 "%qs construct", "taskloop");
10888 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10889 break;
10890 case OMP_SCOPE:
10891 error_at (OMP_CLAUSE_LOCATION (c),
10892 "%<inscan%> %<reduction%> clause on "
10893 "%qs construct", "scope");
10894 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10895 break;
10896 default:
10897 break;
10899 /* FALLTHRU */
10900 case OMP_CLAUSE_IN_REDUCTION:
10901 case OMP_CLAUSE_TASK_REDUCTION:
10902 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
10903 /* OpenACC permits reductions on private variables. */
10904 if (!(region_type & ORT_ACC)
10905 /* taskgroup is actually not a worksharing region. */
10906 && code != OMP_TASKGROUP)
10907 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
10908 decl = OMP_CLAUSE_DECL (c);
10909 if (TREE_CODE (decl) == MEM_REF)
10911 tree type = TREE_TYPE (decl);
10912 bool saved_into_ssa = gimplify_ctxp->into_ssa;
10913 gimplify_ctxp->into_ssa = false;
10914 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
10915 NULL, is_gimple_val, fb_rvalue, false)
10916 == GS_ERROR)
10918 gimplify_ctxp->into_ssa = saved_into_ssa;
10919 remove = true;
10920 break;
10922 gimplify_ctxp->into_ssa = saved_into_ssa;
10923 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
10924 if (DECL_P (v))
10926 omp_firstprivatize_variable (ctx, v);
10927 omp_notice_variable (ctx, v, true);
10929 decl = TREE_OPERAND (decl, 0);
10930 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10932 gimplify_ctxp->into_ssa = false;
10933 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
10934 NULL, is_gimple_val, fb_rvalue, false)
10935 == GS_ERROR)
10937 gimplify_ctxp->into_ssa = saved_into_ssa;
10938 remove = true;
10939 break;
10941 gimplify_ctxp->into_ssa = saved_into_ssa;
10942 v = TREE_OPERAND (decl, 1);
10943 if (DECL_P (v))
10945 omp_firstprivatize_variable (ctx, v);
10946 omp_notice_variable (ctx, v, true);
10948 decl = TREE_OPERAND (decl, 0);
10950 if (TREE_CODE (decl) == ADDR_EXPR
10951 || TREE_CODE (decl) == INDIRECT_REF)
10952 decl = TREE_OPERAND (decl, 0);
10954 goto do_add_decl;
10955 case OMP_CLAUSE_LINEAR:
10956 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
10957 is_gimple_val, fb_rvalue) == GS_ERROR)
10959 remove = true;
10960 break;
10962 else
10964 if (code == OMP_SIMD
10965 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10967 struct gimplify_omp_ctx *octx = outer_ctx;
10968 if (octx
10969 && octx->region_type == ORT_WORKSHARE
10970 && octx->combined_loop
10971 && !octx->distribute)
10973 if (octx->outer_context
10974 && (octx->outer_context->region_type
10975 == ORT_COMBINED_PARALLEL))
10976 octx = octx->outer_context->outer_context;
10977 else
10978 octx = octx->outer_context;
10980 if (octx
10981 && octx->region_type == ORT_WORKSHARE
10982 && octx->combined_loop
10983 && octx->distribute)
10985 error_at (OMP_CLAUSE_LOCATION (c),
10986 "%<linear%> clause for variable other than "
10987 "loop iterator specified on construct "
10988 "combined with %<distribute%>");
10989 remove = true;
10990 break;
10993 /* For combined #pragma omp parallel for simd, need to put
10994 lastprivate and perhaps firstprivate too on the
10995 parallel. Similarly for #pragma omp for simd. */
10996 struct gimplify_omp_ctx *octx = outer_ctx;
10997 bool taskloop_seen = false;
10998 decl = NULL_TREE;
11001 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11002 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11003 break;
11004 decl = OMP_CLAUSE_DECL (c);
11005 if (error_operand_p (decl))
11007 decl = NULL_TREE;
11008 break;
11010 flags = GOVD_SEEN;
11011 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11012 flags |= GOVD_FIRSTPRIVATE;
11013 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11014 flags |= GOVD_LASTPRIVATE;
11015 if (octx
11016 && octx->region_type == ORT_WORKSHARE
11017 && octx->combined_loop)
11019 if (octx->outer_context
11020 && (octx->outer_context->region_type
11021 == ORT_COMBINED_PARALLEL))
11022 octx = octx->outer_context;
11023 else if (omp_check_private (octx, decl, false))
11024 break;
11026 else if (octx
11027 && (octx->region_type & ORT_TASK) != 0
11028 && octx->combined_loop)
11029 taskloop_seen = true;
11030 else if (octx
11031 && octx->region_type == ORT_COMBINED_PARALLEL
11032 && ((ctx->region_type == ORT_WORKSHARE
11033 && octx == outer_ctx)
11034 || taskloop_seen))
11035 flags = GOVD_SEEN | GOVD_SHARED;
11036 else if (octx
11037 && ((octx->region_type & ORT_COMBINED_TEAMS)
11038 == ORT_COMBINED_TEAMS))
11039 flags = GOVD_SEEN | GOVD_SHARED;
11040 else if (octx
11041 && octx->region_type == ORT_COMBINED_TARGET)
11043 if (flags & GOVD_LASTPRIVATE)
11044 flags = GOVD_SEEN | GOVD_MAP;
11046 else
11047 break;
11048 splay_tree_node on
11049 = splay_tree_lookup (octx->variables,
11050 (splay_tree_key) decl);
11051 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11053 octx = NULL;
11054 break;
11056 omp_add_variable (octx, decl, flags);
11057 if (octx->outer_context == NULL)
11058 break;
11059 octx = octx->outer_context;
11061 while (1);
11062 if (octx
11063 && decl
11064 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11065 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11066 omp_notice_variable (octx, decl, true);
11068 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11069 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11070 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11072 notice_outer = false;
11073 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11075 goto do_add;
11077 case OMP_CLAUSE_MAP:
11078 decl = OMP_CLAUSE_DECL (c);
11079 if (error_operand_p (decl))
11080 remove = true;
11081 switch (code)
11083 case OMP_TARGET:
11084 break;
11085 case OACC_DATA:
11086 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11087 break;
11088 /* FALLTHRU */
11089 case OMP_TARGET_DATA:
11090 case OMP_TARGET_ENTER_DATA:
11091 case OMP_TARGET_EXIT_DATA:
11092 case OACC_ENTER_DATA:
11093 case OACC_EXIT_DATA:
11094 case OACC_HOST_DATA:
11095 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11096 || (OMP_CLAUSE_MAP_KIND (c)
11097 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11098 /* For target {,enter ,exit }data only the array slice is
11099 mapped, but not the pointer to it. */
11100 remove = true;
11101 break;
11102 default:
11103 break;
11105 /* For Fortran, not only the pointer to the data is mapped but also
11106 the address of the pointer, the array descriptor etc.; for
11107 'exit data' - and in particular for 'delete:' - having an 'alloc:'
11108 does not make sense. Likewise, for 'update' only transferring the
11109 data itself is needed as the rest has been handled in previous
11110 directives. However, for 'exit data', the array descriptor needs
11111 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
11113 NOTE: Generally, it is not safe to perform "enter data" operations
11114 on arrays where the data *or the descriptor* may go out of scope
11115 before a corresponding "exit data" operation -- and such a
11116 descriptor may be synthesized temporarily, e.g. to pass an
11117 explicit-shape array to a function expecting an assumed-shape
11118 argument. Performing "enter data" inside the called function
11119 would thus be problematic. */
11120 if (code == OMP_TARGET_EXIT_DATA
11121 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
11122 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
11123 == GOMP_MAP_DELETE
11124 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
11125 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
11126 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11127 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
11128 remove = true;
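/* A sketch of the effect (assuming a Fortran allocatable 'arr'; illustrative
   only): for '!$omp target exit data map(delete: arr)' the descriptor's
   GOMP_MAP_TO_PSET entry becomes GOMP_MAP_DELETE above, while for
   'map(from: arr)' it becomes GOMP_MAP_RELEASE, so the descriptor is
   deallocated or released instead of being re-allocated via 'alloc:'.  */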
11130 if (remove)
11131 break;
11132 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11134 struct gimplify_omp_ctx *octx;
11135 for (octx = outer_ctx; octx; octx = octx->outer_context)
11137 if (octx->region_type != ORT_ACC_HOST_DATA)
11138 break;
11139 splay_tree_node n2
11140 = splay_tree_lookup (octx->variables,
11141 (splay_tree_key) decl);
11142 if (n2)
11143 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11144 "declared in enclosing %<host_data%> region",
11145 DECL_NAME (decl));
11148 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11149 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11150 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11151 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11152 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11154 remove = true;
11155 break;
11157 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11158 || (OMP_CLAUSE_MAP_KIND (c)
11159 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11160 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11161 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11163 OMP_CLAUSE_SIZE (c)
11164 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11165 false);
11166 if ((region_type & ORT_TARGET) != 0)
11167 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11168 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11171 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11173 tree base = omp_strip_components_and_deref (decl);
11174 if (DECL_P (base))
11176 decl = base;
11177 splay_tree_node n
11178 = splay_tree_lookup (ctx->variables,
11179 (splay_tree_key) decl);
11180 if (seen_error ()
11181 && n
11182 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11184 remove = true;
11185 break;
11187 flags = GOVD_MAP | GOVD_EXPLICIT;
11189 goto do_add_decl;
11193 if (TREE_CODE (decl) == TARGET_EXPR)
11195 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11196 is_gimple_lvalue, fb_lvalue)
11197 == GS_ERROR)
11198 remove = true;
11200 else if (!DECL_P (decl))
11202 tree d = decl, *pd;
11203 if (TREE_CODE (d) == ARRAY_REF)
11205 while (TREE_CODE (d) == ARRAY_REF)
11206 d = TREE_OPERAND (d, 0);
11207 if (TREE_CODE (d) == COMPONENT_REF
11208 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11209 decl = d;
11211 pd = &OMP_CLAUSE_DECL (c);
11212 if (d == decl
11213 && TREE_CODE (decl) == INDIRECT_REF
11214 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11215 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11216 == REFERENCE_TYPE)
11217 && (OMP_CLAUSE_MAP_KIND (c)
11218 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11220 pd = &TREE_OPERAND (decl, 0);
11221 decl = TREE_OPERAND (decl, 0);
11223 /* An "attach/detach" operation on an update directive should
11224 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11225 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11226 depends on the previous mapping. */
11227 if (code == OACC_UPDATE
11228 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11229 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
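/* For instance (hypothetical user code): on

     #pragma acc update host(s.ptr[0:n])

   a synthesized attach/detach node for 's.ptr' is remapped here to
   GOMP_MAP_ALWAYS_POINTER, which, as noted above, follows whatever mapping
   the enclosing data region established instead of performing a fresh
   attach or detach.  */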
11231 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11233 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11234 == ARRAY_TYPE)
11235 remove = true;
11236 else
11238 gomp_map_kind k = ((code == OACC_EXIT_DATA
11239 || code == OMP_TARGET_EXIT_DATA)
11240 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11241 OMP_CLAUSE_SET_MAP_KIND (c, k);
11245 tree cref = decl;
11247 while (TREE_CODE (cref) == ARRAY_REF)
11248 cref = TREE_OPERAND (cref, 0);
11250 if (TREE_CODE (cref) == INDIRECT_REF)
11251 cref = TREE_OPERAND (cref, 0);
11253 if (TREE_CODE (cref) == COMPONENT_REF)
11255 tree base = cref;
11256 while (base && !DECL_P (base))
11258 tree innerbase = omp_get_base_pointer (base);
11259 if (!innerbase)
11260 break;
11261 base = innerbase;
11263 if (base
11264 && DECL_P (base)
11265 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11266 && POINTER_TYPE_P (TREE_TYPE (base)))
11268 splay_tree_node n
11269 = splay_tree_lookup (ctx->variables,
11270 (splay_tree_key) base);
11271 n->value |= GOVD_SEEN;
11275 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11277 /* Don't gimplify *pd fully at this point, as the base
11278 will need to be adjusted during omp lowering. */
11279 auto_vec<tree, 10> expr_stack;
11280 tree *p = pd;
11281 while (handled_component_p (*p)
11282 || TREE_CODE (*p) == INDIRECT_REF
11283 || TREE_CODE (*p) == ADDR_EXPR
11284 || TREE_CODE (*p) == MEM_REF
11285 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11287 expr_stack.safe_push (*p);
11288 p = &TREE_OPERAND (*p, 0);
11290 for (int i = expr_stack.length () - 1; i >= 0; i--)
11292 tree t = expr_stack[i];
11293 if (TREE_CODE (t) == ARRAY_REF
11294 || TREE_CODE (t) == ARRAY_RANGE_REF)
11296 if (TREE_OPERAND (t, 2) == NULL_TREE)
11298 tree low = unshare_expr (array_ref_low_bound (t));
11299 if (!is_gimple_min_invariant (low))
11301 TREE_OPERAND (t, 2) = low;
11302 if (gimplify_expr (&TREE_OPERAND (t, 2),
11303 pre_p, NULL,
11304 is_gimple_reg,
11305 fb_rvalue) == GS_ERROR)
11306 remove = true;
11309 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11310 NULL, is_gimple_reg,
11311 fb_rvalue) == GS_ERROR)
11312 remove = true;
11313 if (TREE_OPERAND (t, 3) == NULL_TREE)
11315 tree elmt_size = array_ref_element_size (t);
11316 if (!is_gimple_min_invariant (elmt_size))
11318 elmt_size = unshare_expr (elmt_size);
11319 tree elmt_type
11320 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11321 0)));
11322 tree factor
11323 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11324 elmt_size
11325 = size_binop (EXACT_DIV_EXPR, elmt_size,
11326 factor);
11327 TREE_OPERAND (t, 3) = elmt_size;
11328 if (gimplify_expr (&TREE_OPERAND (t, 3),
11329 pre_p, NULL,
11330 is_gimple_reg,
11331 fb_rvalue) == GS_ERROR)
11332 remove = true;
11335 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11336 NULL, is_gimple_reg,
11337 fb_rvalue) == GS_ERROR)
11338 remove = true;
11340 else if (TREE_CODE (t) == COMPONENT_REF)
11342 if (TREE_OPERAND (t, 2) == NULL_TREE)
11344 tree offset = component_ref_field_offset (t);
11345 if (!is_gimple_min_invariant (offset))
11347 offset = unshare_expr (offset);
11348 tree field = TREE_OPERAND (t, 1);
11349 tree factor
11350 = size_int (DECL_OFFSET_ALIGN (field)
11351 / BITS_PER_UNIT);
11352 offset = size_binop (EXACT_DIV_EXPR, offset,
11353 factor);
11354 TREE_OPERAND (t, 2) = offset;
11355 if (gimplify_expr (&TREE_OPERAND (t, 2),
11356 pre_p, NULL,
11357 is_gimple_reg,
11358 fb_rvalue) == GS_ERROR)
11359 remove = true;
11362 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11363 NULL, is_gimple_reg,
11364 fb_rvalue) == GS_ERROR)
11365 remove = true;
11368 for (; expr_stack.length () > 0; )
11370 tree t = expr_stack.pop ();
11372 if (TREE_CODE (t) == ARRAY_REF
11373 || TREE_CODE (t) == ARRAY_RANGE_REF)
11375 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11376 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11377 NULL, is_gimple_val,
11378 fb_rvalue) == GS_ERROR)
11379 remove = true;
11383 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11384 fb_lvalue) == GS_ERROR)
11386 remove = true;
11387 break;
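/* Background for the walk above (a sketch, not normative): GENERIC allows
   an ARRAY_REF to cache its lower bound in operand 2 and its element size,
   measured in units of the element type's alignment, in operand 3 (see
   array_ref_low_bound and array_ref_element_size).  For a section such as
   'map(tofrom: a[i:len])' on a variably sized array, those operands are
   materialized and gimplified here so omp lowering can recompute offsets
   without re-gimplifying the base, which still needs adjusting.  */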
11390 if (!remove
11391 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
11392 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
11393 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
11394 && OMP_CLAUSE_CHAIN (c)
11395 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
11396 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11397 == GOMP_MAP_ALWAYS_POINTER)
11398 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11399 == GOMP_MAP_ATTACH_DETACH)
11400 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11401 == GOMP_MAP_TO_PSET)))
11402 prev_list_p = list_p;
11404 break;
11406 flags = GOVD_MAP | GOVD_EXPLICIT;
11407 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11408 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11409 flags |= GOVD_MAP_ALWAYS_TO;
11411 if ((code == OMP_TARGET
11412 || code == OMP_TARGET_DATA
11413 || code == OMP_TARGET_ENTER_DATA
11414 || code == OMP_TARGET_EXIT_DATA)
11415 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11417 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11418 octx = octx->outer_context)
11420 splay_tree_node n
11421 = splay_tree_lookup (octx->variables,
11422 (splay_tree_key) OMP_CLAUSE_DECL (c));
11423 /* If this is contained in an outer OpenMP region as a
11424 firstprivate value, remove the attach/detach. */
11425 if (n && (n->value & GOVD_FIRSTPRIVATE))
11427 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11428 goto do_add;
11432 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11433 ? GOMP_MAP_DETACH
11434 : GOMP_MAP_ATTACH);
11435 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11438 goto do_add;
11440 case OMP_CLAUSE_AFFINITY:
11441 gimplify_omp_affinity (list_p, pre_p);
11442 remove = true;
11443 break;
11444 case OMP_CLAUSE_DOACROSS:
11445 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11447 tree deps = OMP_CLAUSE_DECL (c);
11448 while (deps && TREE_CODE (deps) == TREE_LIST)
11450 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11451 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11452 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11453 pre_p, NULL, is_gimple_val, fb_rvalue);
11454 deps = TREE_CHAIN (deps);
11457 else
11458 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11459 == OMP_CLAUSE_DOACROSS_SOURCE);
11460 break;
11461 case OMP_CLAUSE_DEPEND:
11462 if (handled_depend_iterators == -1)
11463 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11464 if (handled_depend_iterators)
11466 if (handled_depend_iterators == 2)
11467 remove = true;
11468 break;
11470 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11472 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11473 NULL, is_gimple_val, fb_rvalue);
11474 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11476 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11478 remove = true;
11479 break;
11481 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11483 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11484 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11485 is_gimple_val, fb_rvalue) == GS_ERROR)
11487 remove = true;
11488 break;
11491 if (code == OMP_TASK)
11492 ctx->has_depend = true;
11493 break;
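/* A small sketch (hypothetical user code): for

     #pragma omp task depend(inout: a[i])

   the address '&a[i]' is built with build_fold_addr_expr and gimplified to
   an rvalue above, whereas 'depend(iterator(it=0:n), in: a[it])' is expanded
   beforehand by gimplify_omp_depend, which is recorded in
   handled_depend_iterators.  */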
11495 case OMP_CLAUSE_TO:
11496 case OMP_CLAUSE_FROM:
11497 case OMP_CLAUSE__CACHE_:
11498 decl = OMP_CLAUSE_DECL (c);
11499 if (error_operand_p (decl))
11501 remove = true;
11502 break;
11504 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11505 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11506 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11507 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11508 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11510 remove = true;
11511 break;
11513 if (!DECL_P (decl))
11515 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11516 NULL, is_gimple_lvalue, fb_lvalue)
11517 == GS_ERROR)
11519 remove = true;
11520 break;
11522 break;
11524 goto do_notice;
11526 case OMP_CLAUSE_USE_DEVICE_PTR:
11527 case OMP_CLAUSE_USE_DEVICE_ADDR:
11528 flags = GOVD_EXPLICIT;
11529 goto do_add;
11531 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11532 decl = OMP_CLAUSE_DECL (c);
11533 while (TREE_CODE (decl) == INDIRECT_REF
11534 || TREE_CODE (decl) == ARRAY_REF)
11535 decl = TREE_OPERAND (decl, 0);
11536 flags = GOVD_EXPLICIT;
11537 goto do_add_decl;
11539 case OMP_CLAUSE_IS_DEVICE_PTR:
11540 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11541 goto do_add;
11543 do_add:
11544 decl = OMP_CLAUSE_DECL (c);
11545 do_add_decl:
11546 if (error_operand_p (decl))
11548 remove = true;
11549 break;
11551 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11553 tree t = omp_member_access_dummy_var (decl);
11554 if (t)
11556 tree v = DECL_VALUE_EXPR (decl);
11557 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11558 if (outer_ctx)
11559 omp_notice_variable (outer_ctx, t, true);
11562 if (code == OACC_DATA
11563 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11564 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11565 flags |= GOVD_MAP_0LEN_ARRAY;
11566 omp_add_variable (ctx, decl, flags);
11567 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11568 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11569 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11570 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11572 struct gimplify_omp_ctx *pctx
11573 = code == OMP_TARGET ? outer_ctx : ctx;
11574 if (pctx)
11575 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11576 GOVD_LOCAL | GOVD_SEEN);
11577 if (pctx
11578 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11579 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11580 find_decl_expr,
11581 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11582 NULL) == NULL_TREE)
11583 omp_add_variable (pctx,
11584 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11585 GOVD_LOCAL | GOVD_SEEN);
11586 gimplify_omp_ctxp = pctx;
11587 push_gimplify_context ();
11589 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11590 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11592 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11593 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11594 pop_gimplify_context
11595 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11596 push_gimplify_context ();
11597 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11598 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11599 pop_gimplify_context
11600 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11601 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11602 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11604 gimplify_omp_ctxp = outer_ctx;
11606 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11607 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11609 gimplify_omp_ctxp = ctx;
11610 push_gimplify_context ();
11611 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11613 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11614 NULL, NULL);
11615 TREE_SIDE_EFFECTS (bind) = 1;
11616 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11617 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11619 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11620 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11621 pop_gimplify_context
11622 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11623 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11625 gimplify_omp_ctxp = outer_ctx;
11627 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11628 && OMP_CLAUSE_LINEAR_STMT (c))
11630 gimplify_omp_ctxp = ctx;
11631 push_gimplify_context ();
11632 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11634 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11635 NULL, NULL);
11636 TREE_SIDE_EFFECTS (bind) = 1;
11637 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11638 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11640 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11641 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11642 pop_gimplify_context
11643 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11644 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11646 gimplify_omp_ctxp = outer_ctx;
11648 if (notice_outer)
11649 goto do_notice;
11650 break;
11652 case OMP_CLAUSE_COPYIN:
11653 case OMP_CLAUSE_COPYPRIVATE:
11654 decl = OMP_CLAUSE_DECL (c);
11655 if (error_operand_p (decl))
11657 remove = true;
11658 break;
11660 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11661 && !remove
11662 && !omp_check_private (ctx, decl, true))
11664 remove = true;
11665 if (is_global_var (decl))
11667 if (DECL_THREAD_LOCAL_P (decl))
11668 remove = false;
11669 else if (DECL_HAS_VALUE_EXPR_P (decl))
11671 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11673 if (value
11674 && DECL_P (value)
11675 && DECL_THREAD_LOCAL_P (value))
11676 remove = false;
11679 if (remove)
11680 error_at (OMP_CLAUSE_LOCATION (c),
11681 "copyprivate variable %qE is not threadprivate"
11682 " or private in outer context", DECL_NAME (decl));
11684 do_notice:
11685 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11686 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11687 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11688 && outer_ctx
11689 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11690 || (region_type == ORT_WORKSHARE
11691 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11692 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11693 || code == OMP_LOOP)))
11694 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11695 || (code == OMP_LOOP
11696 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11697 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11698 == ORT_COMBINED_TEAMS))))
11700 splay_tree_node on
11701 = splay_tree_lookup (outer_ctx->variables,
11702 (splay_tree_key)decl);
11703 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11705 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11706 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11707 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11708 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11709 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11710 == POINTER_TYPE))))
11711 omp_firstprivatize_variable (outer_ctx, decl);
11712 else
11714 omp_add_variable (outer_ctx, decl,
11715 GOVD_SEEN | GOVD_SHARED);
11716 if (outer_ctx->outer_context)
11717 omp_notice_variable (outer_ctx->outer_context, decl,
11718 true);
11722 if (outer_ctx)
11723 omp_notice_variable (outer_ctx, decl, true);
11724 if (check_non_private
11725 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11726 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11727 || decl == OMP_CLAUSE_DECL (c)
11728 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11729 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11730 == ADDR_EXPR
11731 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11732 == POINTER_PLUS_EXPR
11733 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11734 (OMP_CLAUSE_DECL (c), 0), 0))
11735 == ADDR_EXPR)))))
11736 && omp_check_private (ctx, decl, false))
11738 error ("%s variable %qE is private in outer context",
11739 check_non_private, DECL_NAME (decl));
11740 remove = true;
11742 break;
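/* An illustrative diagnostic (hypothetical user code):

     int x = 0;
     #pragma omp parallel private(x)
     #pragma omp for reduction(+:x)
     for (i = 0; i < n; i++) ...

   trips the check above with "reduction variable x is private in outer
   context"; check_non_private carries the clause name recorded when the
   clause was first processed.  */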
11744 case OMP_CLAUSE_DETACH:
11745 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11746 goto do_add;
11748 case OMP_CLAUSE_IF:
11749 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11750 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11752 const char *p[2];
11753 for (int i = 0; i < 2; i++)
11754 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11756 case VOID_CST: p[i] = "cancel"; break;
11757 case OMP_PARALLEL: p[i] = "parallel"; break;
11758 case OMP_SIMD: p[i] = "simd"; break;
11759 case OMP_TASK: p[i] = "task"; break;
11760 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11761 case OMP_TARGET_DATA: p[i] = "target data"; break;
11762 case OMP_TARGET: p[i] = "target"; break;
11763 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11764 case OMP_TARGET_ENTER_DATA:
11765 p[i] = "target enter data"; break;
11766 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11767 default: gcc_unreachable ();
11769 error_at (OMP_CLAUSE_LOCATION (c),
11770 "expected %qs %<if%> clause modifier rather than %qs",
11771 p[0], p[1]);
11772 remove = true;
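/* For instance (hypothetical): '#pragma omp parallel if (task: 0)' arrives
   here with code == OMP_PARALLEL and modifier OMP_TASK, yielding
   "expected 'parallel' 'if' clause modifier rather than 'task'".  */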
11774 /* Fall through. */
11776 case OMP_CLAUSE_FINAL:
11777 OMP_CLAUSE_OPERAND (c, 0)
11778 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11779 /* Fall through. */
11781 case OMP_CLAUSE_NUM_TEAMS:
11782 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11783 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11784 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11786 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11788 remove = true;
11789 break;
11791 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11792 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11793 pre_p, NULL, true);
11795 /* Fall through. */
11797 case OMP_CLAUSE_SCHEDULE:
11798 case OMP_CLAUSE_NUM_THREADS:
11799 case OMP_CLAUSE_THREAD_LIMIT:
11800 case OMP_CLAUSE_DIST_SCHEDULE:
11801 case OMP_CLAUSE_DEVICE:
11802 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11803 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11805 if (code != OMP_TARGET)
11807 error_at (OMP_CLAUSE_LOCATION (c),
11808 "%<device%> clause with %<ancestor%> is only "
11809 "allowed on %<target%> construct");
11810 remove = true;
11811 break;
11814 tree clauses = *orig_list_p;
11815 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11816 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11817 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11818 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11819 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11820 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
11823 error_at (OMP_CLAUSE_LOCATION (c),
11824 "with %<ancestor%>, only the %<device%>, "
11825 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11826 "and %<map%> clauses may appear on the "
11827 "construct");
11828 remove = true;
11829 break;
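/* A sketch of the restriction (hypothetical user code):

     #pragma omp target device(ancestor: 1) map(to: x)   // accepted
     #pragma omp target device(ancestor: 1) nowait       // rejected

   the scan over *orig_list_p above allows only device, firstprivate,
   private, defaultmap and map alongside 'ancestor'.  */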
11832 /* Fall through. */
11834 case OMP_CLAUSE_PRIORITY:
11835 case OMP_CLAUSE_GRAINSIZE:
11836 case OMP_CLAUSE_NUM_TASKS:
11837 case OMP_CLAUSE_FILTER:
11838 case OMP_CLAUSE_HINT:
11839 case OMP_CLAUSE_ASYNC:
11840 case OMP_CLAUSE_WAIT:
11841 case OMP_CLAUSE_NUM_GANGS:
11842 case OMP_CLAUSE_NUM_WORKERS:
11843 case OMP_CLAUSE_VECTOR_LENGTH:
11844 case OMP_CLAUSE_WORKER:
11845 case OMP_CLAUSE_VECTOR:
11846 if (OMP_CLAUSE_OPERAND (c, 0)
11847 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
11849 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
11851 remove = true;
11852 break;
11854 /* All these clauses care about the value, not a particular decl,
11855 so try to force it into an SSA_NAME or a fresh temporary. */
11856 OMP_CLAUSE_OPERAND (c, 0)
11857 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
11858 pre_p, NULL, true);
11860 break;
11862 case OMP_CLAUSE_GANG:
11863 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
11864 is_gimple_val, fb_rvalue) == GS_ERROR)
11865 remove = true;
11866 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
11867 is_gimple_val, fb_rvalue) == GS_ERROR)
11868 remove = true;
11869 break;
11871 case OMP_CLAUSE_NOWAIT:
11872 nowait = 1;
11873 break;
11875 case OMP_CLAUSE_ORDERED:
11876 case OMP_CLAUSE_UNTIED:
11877 case OMP_CLAUSE_COLLAPSE:
11878 case OMP_CLAUSE_TILE:
11879 case OMP_CLAUSE_AUTO:
11880 case OMP_CLAUSE_SEQ:
11881 case OMP_CLAUSE_INDEPENDENT:
11882 case OMP_CLAUSE_MERGEABLE:
11883 case OMP_CLAUSE_PROC_BIND:
11884 case OMP_CLAUSE_SAFELEN:
11885 case OMP_CLAUSE_SIMDLEN:
11886 case OMP_CLAUSE_NOGROUP:
11887 case OMP_CLAUSE_THREADS:
11888 case OMP_CLAUSE_SIMD:
11889 case OMP_CLAUSE_BIND:
11890 case OMP_CLAUSE_IF_PRESENT:
11891 case OMP_CLAUSE_FINALIZE:
11892 break;
11894 case OMP_CLAUSE_ORDER:
11895 ctx->order_concurrent = true;
11896 break;
11898 case OMP_CLAUSE_DEFAULTMAP:
11899 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
11900 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
11902 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
11903 gdmkmin = GDMK_SCALAR;
11904 gdmkmax = GDMK_POINTER;
11905 break;
11906 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
11907 gdmkmin = GDMK_SCALAR;
11908 gdmkmax = GDMK_SCALAR_TARGET;
11909 break;
11910 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
11911 gdmkmin = gdmkmax = GDMK_AGGREGATE;
11912 break;
11913 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
11914 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
11915 break;
11916 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
11917 gdmkmin = gdmkmax = GDMK_POINTER;
11918 break;
11919 default:
11920 gcc_unreachable ();
11922 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
11923 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
11925 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
11926 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
11927 break;
11928 case OMP_CLAUSE_DEFAULTMAP_TO:
11929 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
11930 break;
11931 case OMP_CLAUSE_DEFAULTMAP_FROM:
11932 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
11933 break;
11934 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
11935 ctx->defaultmap[gdmk] = GOVD_MAP;
11936 break;
11937 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
11938 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11939 break;
11940 case OMP_CLAUSE_DEFAULTMAP_NONE:
11941 ctx->defaultmap[gdmk] = 0;
11942 break;
11943 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
11944 switch (gdmk)
11946 case GDMK_SCALAR:
11947 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11948 break;
11949 case GDMK_SCALAR_TARGET:
11950 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
11951 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11952 break;
11953 case GDMK_AGGREGATE:
11954 case GDMK_ALLOCATABLE:
11955 ctx->defaultmap[gdmk] = GOVD_MAP;
11956 break;
11957 case GDMK_POINTER:
11958 ctx->defaultmap[gdmk] = GOVD_MAP;
11959 if (!lang_GNU_Fortran ())
11960 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
11961 break;
11962 default:
11963 gcc_unreachable ();
11965 break;
11966 default:
11967 gcc_unreachable ();
11969 break;
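/* For example (a sketch): 'defaultmap(firstprivate: aggregate)' sets only
   ctx->defaultmap[GDMK_AGGREGATE] to GOVD_FIRSTPRIVATE, while a bare
   'defaultmap(none)' clears the entries for all categories, forcing every
   variable referenced in the region to carry an explicit data-sharing or
   map clause.  */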
11971 case OMP_CLAUSE_ALIGNED:
11972 decl = OMP_CLAUSE_DECL (c);
11973 if (error_operand_p (decl))
11975 remove = true;
11976 break;
11978 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
11979 is_gimple_val, fb_rvalue) == GS_ERROR)
11981 remove = true;
11982 break;
11984 if (!is_global_var (decl)
11985 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
11986 omp_add_variable (ctx, decl, GOVD_ALIGNED);
11987 break;
11989 case OMP_CLAUSE_NONTEMPORAL:
11990 decl = OMP_CLAUSE_DECL (c);
11991 if (error_operand_p (decl))
11993 remove = true;
11994 break;
11996 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
11997 break;
11999 case OMP_CLAUSE_ALLOCATE:
12000 decl = OMP_CLAUSE_DECL (c);
12001 if (error_operand_p (decl))
12003 remove = true;
12004 break;
12006 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12007 is_gimple_val, fb_rvalue) == GS_ERROR)
12009 remove = true;
12010 break;
12012 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12013 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12014 == INTEGER_CST))
12016 else if (code == OMP_TASKLOOP
12017 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12018 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12019 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12020 pre_p, NULL, false);
12021 break;
12023 case OMP_CLAUSE_DEFAULT:
12024 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12025 break;
12027 case OMP_CLAUSE_INCLUSIVE:
12028 case OMP_CLAUSE_EXCLUSIVE:
12029 decl = OMP_CLAUSE_DECL (c);
12031 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12032 (splay_tree_key) decl);
12033 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12035 error_at (OMP_CLAUSE_LOCATION (c),
12036 "%qD specified in %qs clause but not in %<inscan%> "
12037 "%<reduction%> clause on the containing construct",
12038 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12039 remove = true;
12041 else
12043 n->value |= GOVD_REDUCTION_INSCAN;
12044 if (outer_ctx->region_type == ORT_SIMD
12045 && outer_ctx->outer_context
12046 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12048 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12049 (splay_tree_key) decl);
12050 if (n && (n->value & GOVD_REDUCTION) != 0)
12051 n->value |= GOVD_REDUCTION_INSCAN;
12055 break;
12057 case OMP_CLAUSE_NOHOST:
12058 default:
12059 gcc_unreachable ();
12062 if (code == OACC_DATA
12063 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12064 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12065 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12066 remove = true;
12067 if (remove)
12068 *list_p = OMP_CLAUSE_CHAIN (c);
12069 else
12070 list_p = &OMP_CLAUSE_CHAIN (c);
12073 ctx->clauses = *orig_list_p;
12074 gimplify_omp_ctxp = ctx;
12077 /* Return true if DECL is a candidate for shared to firstprivate
12078 optimization. We only consider non-addressable scalars that
12079 are not too big and are not references. */
12081 static bool
12082 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12084 if (TREE_ADDRESSABLE (decl))
12085 return false;
12086 tree type = TREE_TYPE (decl);
12087 if (!is_gimple_reg_type (type)
12088 || TREE_CODE (type) == REFERENCE_TYPE
12089 || TREE_ADDRESSABLE (type))
12090 return false;
12091 /* Don't optimize too large decls, as each thread/task will have
12092 its own. */
12093 HOST_WIDE_INT len = int_size_in_bytes (type);
12094 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12095 return false;
12096 if (omp_privatize_by_reference (decl))
12097 return false;
12098 return true;
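/* E.g. (a sketch, assuming a 64-bit target where POINTER_SIZE is 64): a
   never-address-taken 'int' or 'double' qualifies, being a register type of
   at most 4 * POINTER_SIZE / BITS_PER_UNIT = 32 bytes; an addressable
   variable, a reference, or a larger aggregate does not.  */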
12101 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12102 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
12103 mark it as GOVD_WRITTEN in outer contexts. */
12105 static void
12106 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12108 for (; ctx; ctx = ctx->outer_context)
12110 splay_tree_node n = splay_tree_lookup (ctx->variables,
12111 (splay_tree_key) decl);
12112 if (n == NULL)
12113 continue;
12114 else if (n->value & GOVD_SHARED)
12116 n->value |= GOVD_WRITTEN;
12117 return;
12119 else if (n->value & GOVD_DATA_SHARE_CLASS)
12120 return;
12124 /* Helper callback for walk_gimple_seq to discover possible stores
12125 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12126 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12127 for those. */
12129 static tree
12130 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12132 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12134 *walk_subtrees = 0;
12135 if (!wi->is_lhs)
12136 return NULL_TREE;
12138 tree op = *tp;
12141 if (handled_component_p (op))
12142 op = TREE_OPERAND (op, 0);
12143 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12144 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12145 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12146 else
12147 break;
12149 while (1);
12150 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12151 return NULL_TREE;
12153 omp_mark_stores (gimplify_omp_ctxp, op);
12154 return NULL_TREE;
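/* E.g. (a sketch): an assignment 's = 1' to a shared 's' inside a parallel
   body reaches this callback with wi->is_lhs set, so 's' is marked
   GOVD_WRITTEN and the shared-to-firstprivate read-only optimization is
   suppressed for it.  */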
12157 /* Helper callback for walk_gimple_seq to discover possible stores
12158 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12159 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12160 for those. */
12162 static tree
12163 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12164 bool *handled_ops_p,
12165 struct walk_stmt_info *wi)
12167 gimple *stmt = gsi_stmt (*gsi_p);
12168 switch (gimple_code (stmt))
12170 /* Don't recurse on OpenMP constructs for which
12171 gimplify_adjust_omp_clauses already handled the bodies,
12172 except handle gimple_omp_for_pre_body. */
12173 case GIMPLE_OMP_FOR:
12174 *handled_ops_p = true;
12175 if (gimple_omp_for_pre_body (stmt))
12176 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12177 omp_find_stores_stmt, omp_find_stores_op, wi);
12178 break;
12179 case GIMPLE_OMP_PARALLEL:
12180 case GIMPLE_OMP_TASK:
12181 case GIMPLE_OMP_SECTIONS:
12182 case GIMPLE_OMP_SINGLE:
12183 case GIMPLE_OMP_SCOPE:
12184 case GIMPLE_OMP_TARGET:
12185 case GIMPLE_OMP_TEAMS:
12186 case GIMPLE_OMP_CRITICAL:
12187 *handled_ops_p = true;
12188 break;
12189 default:
12190 break;
12192 return NULL_TREE;
12195 struct gimplify_adjust_omp_clauses_data
12197 tree *list_p;
12198 gimple_seq *pre_p;
12201 /* For all variables that were not actually used within the context,
12202 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12204 static int
12205 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12207 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12208 gimple_seq *pre_p
12209 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12210 tree decl = (tree) n->key;
12211 unsigned flags = n->value;
12212 enum omp_clause_code code;
12213 tree clause;
12214 bool private_debug;
12216 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12217 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12218 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12219 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12220 return 0;
12221 if ((flags & GOVD_SEEN) == 0)
12222 return 0;
12223 if (flags & GOVD_DEBUG_PRIVATE)
12225 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12226 private_debug = true;
12228 else if (flags & GOVD_MAP)
12229 private_debug = false;
12230 else
12231 private_debug
12232 = lang_hooks.decls.omp_private_debug_clause (decl,
12233 !!(flags & GOVD_SHARED));
12234 if (private_debug)
12235 code = OMP_CLAUSE_PRIVATE;
12236 else if (flags & GOVD_MAP)
12238 code = OMP_CLAUSE_MAP;
12239 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12240 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12242 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12243 return 0;
12245 if (VAR_P (decl)
12246 && DECL_IN_CONSTANT_POOL (decl)
12247 && !lookup_attribute ("omp declare target",
12248 DECL_ATTRIBUTES (decl)))
12250 tree id = get_identifier ("omp declare target");
12251 DECL_ATTRIBUTES (decl)
12252 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12253 varpool_node *node = varpool_node::get (decl);
12254 if (node)
12256 node->offloadable = 1;
12257 if (ENABLE_OFFLOADING)
12258 g->have_offload = true;
12262 else if (flags & GOVD_SHARED)
12264 if (is_global_var (decl))
12266 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12267 while (ctx != NULL)
12269 splay_tree_node on
12270 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12271 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12272 | GOVD_PRIVATE | GOVD_REDUCTION
12273 | GOVD_LINEAR | GOVD_MAP)) != 0)
12274 break;
12275 ctx = ctx->outer_context;
12277 if (ctx == NULL)
12278 return 0;
12280 code = OMP_CLAUSE_SHARED;
12281 /* Don't optimize shared into firstprivate for read-only vars
12282 on tasks with a depend clause; we shouldn't try to copy them
12283 until the dependencies are satisfied. */
12284 if (gimplify_omp_ctxp->has_depend)
12285 flags |= GOVD_WRITTEN;
12287 else if (flags & GOVD_PRIVATE)
12288 code = OMP_CLAUSE_PRIVATE;
12289 else if (flags & GOVD_FIRSTPRIVATE)
12291 code = OMP_CLAUSE_FIRSTPRIVATE;
12292 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12293 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12294 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12296 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12297 "%<target%> construct", decl);
12298 return 0;
12301 else if (flags & GOVD_LASTPRIVATE)
12302 code = OMP_CLAUSE_LASTPRIVATE;
12303 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12304 return 0;
12305 else if (flags & GOVD_CONDTEMP)
12307 code = OMP_CLAUSE__CONDTEMP_;
12308 gimple_add_tmp_var (decl);
12310 else
12311 gcc_unreachable ();
12313 if (((flags & GOVD_LASTPRIVATE)
12314 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12315 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12316 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12318 tree chain = *list_p;
12319 clause = build_omp_clause (input_location, code);
12320 OMP_CLAUSE_DECL (clause) = decl;
12321 OMP_CLAUSE_CHAIN (clause) = chain;
12322 if (private_debug)
12323 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12324 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12325 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12326 else if (code == OMP_CLAUSE_SHARED
12327 && (flags & GOVD_WRITTEN) == 0
12328 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12329 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12330 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12331 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12332 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12334 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12335 OMP_CLAUSE_DECL (nc) = decl;
12336 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12337 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12338 OMP_CLAUSE_DECL (clause)
12339 = build_simple_mem_ref_loc (input_location, decl);
12340 OMP_CLAUSE_DECL (clause)
12341 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12342 build_int_cst (build_pointer_type (char_type_node), 0));
12343 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12344 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12345 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12346 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12347 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12348 OMP_CLAUSE_CHAIN (nc) = chain;
12349 OMP_CLAUSE_CHAIN (clause) = nc;
12350 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12351 gimplify_omp_ctxp = ctx->outer_context;
12352 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12353 pre_p, NULL, is_gimple_val, fb_rvalue);
12354 gimplify_omp_ctxp = ctx;
12356 else if (code == OMP_CLAUSE_MAP)
12358 int kind;
12359 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12360 switch (flags & (GOVD_MAP_TO_ONLY
12361 | GOVD_MAP_FORCE
12362 | GOVD_MAP_FORCE_PRESENT
12363 | GOVD_MAP_ALLOC_ONLY
12364 | GOVD_MAP_FROM_ONLY))
12366 case 0:
12367 kind = GOMP_MAP_TOFROM;
12368 break;
12369 case GOVD_MAP_FORCE:
12370 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12371 break;
12372 case GOVD_MAP_TO_ONLY:
12373 kind = GOMP_MAP_TO;
12374 break;
12375 case GOVD_MAP_FROM_ONLY:
12376 kind = GOMP_MAP_FROM;
12377 break;
12378 case GOVD_MAP_ALLOC_ONLY:
12379 kind = GOMP_MAP_ALLOC;
12380 break;
12381 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12382 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12383 break;
12384 case GOVD_MAP_FORCE_PRESENT:
12385 kind = GOMP_MAP_FORCE_PRESENT;
12386 break;
12387 default:
12388 gcc_unreachable ();
12390 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12391 /* Setting of the implicit flag for the runtime is currently disabled for
12392 OpenACC. */
12393 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12394 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12395 if (DECL_SIZE (decl)
12396 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12398 tree decl2 = DECL_VALUE_EXPR (decl);
12399 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12400 decl2 = TREE_OPERAND (decl2, 0);
12401 gcc_assert (DECL_P (decl2));
12402 tree mem = build_simple_mem_ref (decl2);
12403 OMP_CLAUSE_DECL (clause) = mem;
12404 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12405 if (gimplify_omp_ctxp->outer_context)
12407 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12408 omp_notice_variable (ctx, decl2, true);
12409 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12411 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12412 OMP_CLAUSE_MAP);
12413 OMP_CLAUSE_DECL (nc) = decl;
12414 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12415 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12416 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12417 else
12418 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12419 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12420 OMP_CLAUSE_CHAIN (clause) = nc;
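/* A sketch of the result (assuming a C VLA 'int vla[n]' mapped on a target
   region): the clause decl becomes the INDIRECT_REF from the VLA's
   DECL_VALUE_EXPR with the run-time TYPE_SIZE_UNIT as size, and a trailing
   zero-size GOMP_MAP_FIRSTPRIVATE_POINTER (or GOMP_MAP_POINTER) clause is
   appended for the base pointer itself.  */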
12422 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12423 && omp_privatize_by_reference (decl))
12425 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12426 OMP_CLAUSE_SIZE (clause)
12427 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12428 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12429 gimplify_omp_ctxp = ctx->outer_context;
12430 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12431 pre_p, NULL, is_gimple_val, fb_rvalue);
12432 gimplify_omp_ctxp = ctx;
12433 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12434 OMP_CLAUSE_MAP);
12435 OMP_CLAUSE_DECL (nc) = decl;
12436 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12437 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12438 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12439 OMP_CLAUSE_CHAIN (clause) = nc;
12441 else
12442 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12444 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12446 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12447 OMP_CLAUSE_DECL (nc) = decl;
12448 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12449 OMP_CLAUSE_CHAIN (nc) = chain;
12450 OMP_CLAUSE_CHAIN (clause) = nc;
12451 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12452 gimplify_omp_ctxp = ctx->outer_context;
12453 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12454 (ctx->region_type & ORT_ACC) != 0);
12455 gimplify_omp_ctxp = ctx;
12457 *list_p = clause;
12458 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12459 gimplify_omp_ctxp = ctx->outer_context;
12460 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12461 in simd. Those are only added for the local vars inside the simd body
12462 and they don't need to be e.g. default constructible. */
12463 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12464 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12465 (ctx->region_type & ORT_ACC) != 0);
12466 if (gimplify_omp_ctxp)
12467 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12468 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12469 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12470 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12471 true);
12472 gimplify_omp_ctxp = ctx;
12473 return 0;
12476 static void
12477 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12478 enum tree_code code)
12480 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12481 tree *orig_list_p = list_p;
12482 tree c, decl;
12483 bool has_inscan_reductions = false;
12485 if (body)
12487 struct gimplify_omp_ctx *octx;
12488 for (octx = ctx; octx; octx = octx->outer_context)
12489 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12490 break;
12491 if (octx)
12493 struct walk_stmt_info wi;
12494 memset (&wi, 0, sizeof (wi));
12495 walk_gimple_seq (body, omp_find_stores_stmt,
12496 omp_find_stores_op, &wi);
12500 if (ctx->add_safelen1)
12502 /* If there are VLAs in the body of the simd loop, prevent
12503 vectorization. */
12504 gcc_assert (ctx->region_type == ORT_SIMD);
12505 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12506 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12507 OMP_CLAUSE_CHAIN (c) = *list_p;
12508 *list_p = c;
12509 list_p = &OMP_CLAUSE_CHAIN (c);
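/* E.g. (illustrative): a VLA declared inside the body of '#pragma omp simd'
   sets ctx->add_safelen1, and the safelen(1) clause built here limits the
   safe vectorization length to a single iteration, effectively disabling
   vectorization of the loop.  */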
12512 if (ctx->region_type == ORT_WORKSHARE
12513 && ctx->outer_context
12514 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12516 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12517 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12518 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12520 decl = OMP_CLAUSE_DECL (c);
12521 splay_tree_node n
12522 = splay_tree_lookup (ctx->outer_context->variables,
12523 (splay_tree_key) decl);
12524 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12525 (splay_tree_key) decl));
12526 omp_add_variable (ctx, decl, n->value);
12527 tree c2 = copy_node (c);
12528 OMP_CLAUSE_CHAIN (c2) = *list_p;
12529 *list_p = c2;
12530 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12531 continue;
12532 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12533 OMP_CLAUSE_FIRSTPRIVATE);
12534 OMP_CLAUSE_DECL (c2) = decl;
12535 OMP_CLAUSE_CHAIN (c2) = *list_p;
12536 *list_p = c2;
12540 tree attach_list = NULL_TREE;
12541 tree *attach_tail = &attach_list;
12543 while ((c = *list_p) != NULL)
12545 splay_tree_node n;
12546 bool remove = false;
12547 bool move_attach = false;
12549 switch (OMP_CLAUSE_CODE (c))
12551 case OMP_CLAUSE_FIRSTPRIVATE:
12552 if ((ctx->region_type & ORT_TARGET)
12553 && (ctx->region_type & ORT_ACC) == 0
12554 && TYPE_ATOMIC (strip_array_types
12555 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12557 error_at (OMP_CLAUSE_LOCATION (c),
12558 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12559 "%<target%> construct", OMP_CLAUSE_DECL (c));
12560 remove = true;
12561 break;
12563 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12565 decl = OMP_CLAUSE_DECL (c);
12566 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12567 if ((n->value & GOVD_MAP) != 0)
12569 remove = true;
12570 break;
12572 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12573 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12575 /* FALLTHRU */
12576 case OMP_CLAUSE_PRIVATE:
12577 case OMP_CLAUSE_SHARED:
12578 case OMP_CLAUSE_LINEAR:
12579 decl = OMP_CLAUSE_DECL (c);
12580 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12581 remove = !(n->value & GOVD_SEEN);
12582 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12583 && code == OMP_PARALLEL
12584 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12585 remove = true;
12586 if (! remove)
12588 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12589 if ((n->value & GOVD_DEBUG_PRIVATE)
12590 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12592 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12593 || ((n->value & GOVD_DATA_SHARE_CLASS)
12594 == GOVD_SHARED));
12595 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12596 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12598 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12599 && ctx->has_depend
12600 && DECL_P (decl))
12601 n->value |= GOVD_WRITTEN;
12602 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12603 && (n->value & GOVD_WRITTEN) == 0
12604 && DECL_P (decl)
12605 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12606 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12607 else if (DECL_P (decl)
12608 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12609 && (n->value & GOVD_WRITTEN) != 0)
12610 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12611 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12612 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12613 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12615 else
12616 n->value &= ~GOVD_EXPLICIT;
12617 break;
12619 case OMP_CLAUSE_LASTPRIVATE:
12620 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12621 accurately reflect the presence of a FIRSTPRIVATE clause. */
12622 decl = OMP_CLAUSE_DECL (c);
12623 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12624 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12625 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12626 if (code == OMP_DISTRIBUTE
12627 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12629 remove = true;
12630 error_at (OMP_CLAUSE_LOCATION (c),
12631 "same variable used in %<firstprivate%> and "
12632 "%<lastprivate%> clauses on %<distribute%> "
12633 "construct");
12635 if (!remove
12636 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12637 && DECL_P (decl)
12638 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12639 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12640 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12641 remove = true;
12642 break;
12644 case OMP_CLAUSE_ALIGNED:
12645 decl = OMP_CLAUSE_DECL (c);
12646 if (!is_global_var (decl))
12648 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12649 remove = n == NULL || !(n->value & GOVD_SEEN);
12650 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12652 struct gimplify_omp_ctx *octx;
12653 if (n != NULL
12654 && (n->value & (GOVD_DATA_SHARE_CLASS
12655 & ~GOVD_FIRSTPRIVATE)))
12656 remove = true;
12657 else
12658 for (octx = ctx->outer_context; octx;
12659 octx = octx->outer_context)
12661 n = splay_tree_lookup (octx->variables,
12662 (splay_tree_key) decl);
12663 if (n == NULL)
12664 continue;
12665 if (n->value & GOVD_LOCAL)
12666 break;
12667 /* We have to avoid assigning a shared variable
12668 to itself when trying to add
12669 __builtin_assume_aligned. */
12670 if (n->value & GOVD_SHARED)
12672 remove = true;
12673 break;
12678 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12680 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12681 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12682 remove = true;
12684 break;
12686 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12687 decl = OMP_CLAUSE_DECL (c);
12688 while (TREE_CODE (decl) == INDIRECT_REF
12689 || TREE_CODE (decl) == ARRAY_REF)
12690 decl = TREE_OPERAND (decl, 0);
12691 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12692 remove = n == NULL || !(n->value & GOVD_SEEN);
12693 break;
12695 case OMP_CLAUSE_IS_DEVICE_PTR:
12696 case OMP_CLAUSE_NONTEMPORAL:
12697 decl = OMP_CLAUSE_DECL (c);
12698 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12699 remove = n == NULL || !(n->value & GOVD_SEEN);
12700 break;
12702 case OMP_CLAUSE_MAP:
12703 if (code == OMP_TARGET_EXIT_DATA
12704 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12706 remove = true;
12707 break;
12709 /* If we have a target region, we can push all the attaches to the
12710 end of the list (we may have standalone "attach" operations
12711 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12712 the attachment point AND the pointed-to block have been mapped).
12713 If we have something else, e.g. "enter data", we need to keep
12714 "attach" nodes together with the previous node they attach to so
12715 that separate "exit data" operations work properly (see
12716 libgomp/target.c). */
12717 if ((ctx->region_type & ORT_TARGET) != 0
12718 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12719 || (OMP_CLAUSE_MAP_KIND (c)
12720 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12721 move_attach = true;
12722 decl = OMP_CLAUSE_DECL (c);
12723 /* Data clauses associated with reductions must be
12724 compatible with present_or_copy. Warn and adjust the clause
12725 if that is not the case. */
12726 if (ctx->region_type == ORT_ACC_PARALLEL
12727 || ctx->region_type == ORT_ACC_SERIAL)
12729 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12730 n = NULL;
12732 if (DECL_P (t))
12733 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12735 if (n && (n->value & GOVD_REDUCTION))
12737 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12739 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12740 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12741 && kind != GOMP_MAP_FORCE_PRESENT
12742 && kind != GOMP_MAP_POINTER)
12744 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12745 "incompatible data clause with reduction "
12746 "on %qE; promoting to %<present_or_copy%>",
12747 DECL_NAME (t));
12748 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
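/* For instance (hypothetical user code): on

     #pragma acc parallel copyin(sum) reduction(+:sum)

   the GOMP_MAP_TO coming from 'copyin' fails the GOMP_MAP_TOFROM test
   above, so the warning fires and the clause is promoted to
   GOMP_MAP_TOFROM, i.e. present_or_copy.  */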
12752 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12753 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12755 remove = true;
12756 break;
12758 if (!DECL_P (decl))
12760 if ((ctx->region_type & ORT_TARGET) != 0
12761 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12763 if (TREE_CODE (decl) == INDIRECT_REF
12764 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12765 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12766 == REFERENCE_TYPE))
12767 decl = TREE_OPERAND (decl, 0);
12768 if (TREE_CODE (decl) == COMPONENT_REF)
12770 while (TREE_CODE (decl) == COMPONENT_REF)
12771 decl = TREE_OPERAND (decl, 0);
12772 if (DECL_P (decl))
12774 n = splay_tree_lookup (ctx->variables,
12775 (splay_tree_key) decl);
12776 if (!(n->value & GOVD_SEEN))
12777 remove = true;
12781 break;
12783 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12784 if ((ctx->region_type & ORT_TARGET) != 0
12785 && !(n->value & GOVD_SEEN)
12786 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12787 && (!is_global_var (decl)
12788 || !lookup_attribute ("omp declare target link",
12789 DECL_ATTRIBUTES (decl))))
12791 remove = true;
12792 /* For struct element mapping, if the struct is never referenced
12793 in the target block and none of the mappings has an always
12794 modifier, remove all the struct element mappings, which immediately
12795 follow the GOMP_MAP_STRUCT map clause. */
12796 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12798 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12799 while (cnt--)
12800 OMP_CLAUSE_CHAIN (c)
12801 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
12804 else if (DECL_SIZE (decl)
12805 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
12806 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
12807 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
12808 && (OMP_CLAUSE_MAP_KIND (c)
12809 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12811 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12812 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12813 INTEGER_CST. */
12814 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
12816 tree decl2 = DECL_VALUE_EXPR (decl);
12817 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12818 decl2 = TREE_OPERAND (decl2, 0);
12819 gcc_assert (DECL_P (decl2));
12820 tree mem = build_simple_mem_ref (decl2);
12821 OMP_CLAUSE_DECL (c) = mem;
12822 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12823 if (ctx->outer_context)
12825 omp_notice_variable (ctx->outer_context, decl2, true);
12826 omp_notice_variable (ctx->outer_context,
12827 OMP_CLAUSE_SIZE (c), true);
12829 if (((ctx->region_type & ORT_TARGET) != 0
12830 || !ctx->target_firstprivatize_array_bases)
12831 && ((n->value & GOVD_SEEN) == 0
12832 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
12834 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12835 OMP_CLAUSE_MAP);
12836 OMP_CLAUSE_DECL (nc) = decl;
12837 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12838 if (ctx->target_firstprivatize_array_bases)
12839 OMP_CLAUSE_SET_MAP_KIND (nc,
12840 GOMP_MAP_FIRSTPRIVATE_POINTER);
12841 else
12842 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12843 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
12844 OMP_CLAUSE_CHAIN (c) = nc;
12845 c = nc;
12848 else
12850 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12851 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12852 gcc_assert ((n->value & GOVD_SEEN) == 0
12853 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12854 == 0));
12856 break;
12858 case OMP_CLAUSE_TO:
12859 case OMP_CLAUSE_FROM:
12860 case OMP_CLAUSE__CACHE_:
12861 decl = OMP_CLAUSE_DECL (c);
12862 if (!DECL_P (decl))
12863 break;
12864 if (DECL_SIZE (decl)
12865 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12867 tree decl2 = DECL_VALUE_EXPR (decl);
12868 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12869 decl2 = TREE_OPERAND (decl2, 0);
12870 gcc_assert (DECL_P (decl2));
12871 tree mem = build_simple_mem_ref (decl2);
12872 OMP_CLAUSE_DECL (c) = mem;
12873 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12874 if (ctx->outer_context)
12876 omp_notice_variable (ctx->outer_context, decl2, true);
12877 omp_notice_variable (ctx->outer_context,
12878 OMP_CLAUSE_SIZE (c), true);
12881 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12882 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12883 break;
12885 case OMP_CLAUSE_REDUCTION:
12886 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12888 decl = OMP_CLAUSE_DECL (c);
12889 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12890 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
12892 remove = true;
12893 error_at (OMP_CLAUSE_LOCATION (c),
12894 "%qD specified in %<inscan%> %<reduction%> clause "
12895 "but not in %<scan%> directive clause", decl);
12896 break;
12898 has_inscan_reductions = true;
12900 /* FALLTHRU */
12901 case OMP_CLAUSE_IN_REDUCTION:
12902 case OMP_CLAUSE_TASK_REDUCTION:
12903 decl = OMP_CLAUSE_DECL (c);
12904 /* OpenACC reductions need a present_or_copy data clause.
12905 Add one if necessary. Emit an error when the reduction is private. */
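/* Sketch of the fixup below: for
     #pragma acc parallel reduction(+:sum)
   with no explicit data clause for 'sum', a GOMP_MAP_TOFROM map
   clause for 'sum' (present_or_copy semantics) is built and chained
   in, marked with OMP_CLAUSE_MAP_IN_REDUCTION.  */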
12906 if (ctx->region_type == ORT_ACC_PARALLEL
12907 || ctx->region_type == ORT_ACC_SERIAL)
12909 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12910 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12912 remove = true;
12913 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
12914 "reduction on %qE", DECL_NAME (decl));
12916 else if ((n->value & GOVD_MAP) == 0)
12918 tree next = OMP_CLAUSE_CHAIN (c);
12919 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
12920 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
12921 OMP_CLAUSE_DECL (nc) = decl;
12922 OMP_CLAUSE_CHAIN (c) = nc;
12923 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12924 (ctx->region_type
12925 & ORT_ACC) != 0);
12926 while (1)
12928 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
12929 if (OMP_CLAUSE_CHAIN (nc) == NULL)
12930 break;
12931 nc = OMP_CLAUSE_CHAIN (nc);
12933 OMP_CLAUSE_CHAIN (nc) = next;
12934 n->value |= GOVD_MAP;
12937 if (DECL_P (decl)
12938 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12939 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12940 break;
12942 case OMP_CLAUSE_ALLOCATE:
12943 decl = OMP_CLAUSE_DECL (c);
12944 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12945 if (n != NULL && !(n->value & GOVD_SEEN))
12947 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
12948 != 0
12949 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
12950 remove = true;
12952 if (!remove
12953 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12954 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
12955 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
12956 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
12957 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
12959 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12960 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
12961 if (n == NULL)
12963 enum omp_clause_default_kind default_kind
12964 = ctx->default_kind;
12965 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
12966 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12967 true);
12968 ctx->default_kind = default_kind;
12970 else
12971 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12972 true);
12974 break;
12976 case OMP_CLAUSE_COPYIN:
12977 case OMP_CLAUSE_COPYPRIVATE:
12978 case OMP_CLAUSE_IF:
12979 case OMP_CLAUSE_NUM_THREADS:
12980 case OMP_CLAUSE_NUM_TEAMS:
12981 case OMP_CLAUSE_THREAD_LIMIT:
12982 case OMP_CLAUSE_DIST_SCHEDULE:
12983 case OMP_CLAUSE_DEVICE:
12984 case OMP_CLAUSE_SCHEDULE:
12985 case OMP_CLAUSE_NOWAIT:
12986 case OMP_CLAUSE_ORDERED:
12987 case OMP_CLAUSE_DEFAULT:
12988 case OMP_CLAUSE_UNTIED:
12989 case OMP_CLAUSE_COLLAPSE:
12990 case OMP_CLAUSE_FINAL:
12991 case OMP_CLAUSE_MERGEABLE:
12992 case OMP_CLAUSE_PROC_BIND:
12993 case OMP_CLAUSE_SAFELEN:
12994 case OMP_CLAUSE_SIMDLEN:
12995 case OMP_CLAUSE_DEPEND:
12996 case OMP_CLAUSE_DOACROSS:
12997 case OMP_CLAUSE_PRIORITY:
12998 case OMP_CLAUSE_GRAINSIZE:
12999 case OMP_CLAUSE_NUM_TASKS:
13000 case OMP_CLAUSE_NOGROUP:
13001 case OMP_CLAUSE_THREADS:
13002 case OMP_CLAUSE_SIMD:
13003 case OMP_CLAUSE_FILTER:
13004 case OMP_CLAUSE_HINT:
13005 case OMP_CLAUSE_DEFAULTMAP:
13006 case OMP_CLAUSE_ORDER:
13007 case OMP_CLAUSE_BIND:
13008 case OMP_CLAUSE_DETACH:
13009 case OMP_CLAUSE_USE_DEVICE_PTR:
13010 case OMP_CLAUSE_USE_DEVICE_ADDR:
13011 case OMP_CLAUSE_ASYNC:
13012 case OMP_CLAUSE_WAIT:
13013 case OMP_CLAUSE_INDEPENDENT:
13014 case OMP_CLAUSE_NUM_GANGS:
13015 case OMP_CLAUSE_NUM_WORKERS:
13016 case OMP_CLAUSE_VECTOR_LENGTH:
13017 case OMP_CLAUSE_GANG:
13018 case OMP_CLAUSE_WORKER:
13019 case OMP_CLAUSE_VECTOR:
13020 case OMP_CLAUSE_AUTO:
13021 case OMP_CLAUSE_SEQ:
13022 case OMP_CLAUSE_TILE:
13023 case OMP_CLAUSE_IF_PRESENT:
13024 case OMP_CLAUSE_FINALIZE:
13025 case OMP_CLAUSE_INCLUSIVE:
13026 case OMP_CLAUSE_EXCLUSIVE:
13027 break;
13029 case OMP_CLAUSE_NOHOST:
13030 default:
13031 gcc_unreachable ();
13034 if (remove)
13035 *list_p = OMP_CLAUSE_CHAIN (c);
13036 else if (move_attach)
13039 /* Remove the attach node from here and separate it out into its own list. */
13039 *attach_tail = c;
13040 *list_p = OMP_CLAUSE_CHAIN (c);
13041 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13042 attach_tail = &OMP_CLAUSE_CHAIN (c);
13044 else
13045 list_p = &OMP_CLAUSE_CHAIN (c);
13048 /* Splice attach nodes at the end of the list. */
13049 if (attach_list)
13051 *list_p = attach_list;
13052 list_p = attach_tail;
13055 /* Add in any implicit data sharing. */
13056 struct gimplify_adjust_omp_clauses_data data;
13057 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13059 /* OpenMP. Implicit clauses are added at the start of the clause list,
13060 but after any non-map clauses. */
13061 tree *implicit_add_list_p = orig_list_p;
13062 while (*implicit_add_list_p
13063 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13064 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13065 data.list_p = implicit_add_list_p;
13067 else
13068 /* OpenACC. */
13069 data.list_p = list_p;
13070 data.pre_p = pre_p;
13071 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13073 if (has_inscan_reductions)
13074 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13075 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13076 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13078 error_at (OMP_CLAUSE_LOCATION (c),
13079 "%<inscan%> %<reduction%> clause used together with "
13080 "%<linear%> clause for a variable other than loop "
13081 "iterator");
13082 break;
13085 gimplify_omp_ctxp = ctx->outer_context;
13086 delete_omp_context (ctx);
13089 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
13090 -1 if not known yet (simd is involved; it won't be known until
13091 vectorization) and 1 if they do. If SCORES is non-NULL, it should
13092 point to an array of at least 2*NCONSTRUCTS+2 ints, which will be
13093 filled with the positions of the CONSTRUCTS (position -1 if it will
13094 never match) followed by the number of constructs in the OpenMP
13095 context construct trait. If the score depends on whether it will be
13096 in a declare simd clone or not, the function returns 2 and there
13097 will be two sets of scores, the first one for the case that it is
13098 not in a declare simd clone, the second for the case that it is. */
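/* A hedged usage example: for
     #pragma omp declare variant (foo) match (construct={parallel,for})
   the caller passes CONSTRUCTS = {OMP_PARALLEL, OMP_FOR} and
   NCONSTRUCTS = 2; when gimplifying inside '#pragma omp parallel for'
   this is expected to return 1.  */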
13100 static int
13101 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13102 int *scores)
13104 int matched = 0, cnt = 0;
13105 bool simd_seen = false;
13106 bool target_seen = false;
13107 int declare_simd_cnt = -1;
13108 auto_vec<enum tree_code, 16> codes;
13109 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13111 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13112 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13113 == ORT_TARGET && ctx->code == OMP_TARGET)
13114 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13115 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13116 || (ctx->region_type == ORT_SIMD
13117 && ctx->code == OMP_SIMD
13118 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13120 ++cnt;
13121 if (scores)
13122 codes.safe_push (ctx->code);
13123 else if (matched < nconstructs && ctx->code == constructs[matched])
13125 if (ctx->code == OMP_SIMD)
13127 if (matched)
13128 return 0;
13129 simd_seen = true;
13131 ++matched;
13133 if (ctx->code == OMP_TARGET)
13135 if (scores == NULL)
13136 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13137 target_seen = true;
13138 break;
13141 else if (ctx->region_type == ORT_WORKSHARE
13142 && ctx->code == OMP_LOOP
13143 && ctx->outer_context
13144 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13145 && ctx->outer_context->outer_context
13146 && ctx->outer_context->outer_context->code == OMP_LOOP
13147 && ctx->outer_context->outer_context->distribute)
13148 ctx = ctx->outer_context->outer_context;
13149 ctx = ctx->outer_context;
13151 if (!target_seen
13152 && lookup_attribute ("omp declare simd",
13153 DECL_ATTRIBUTES (current_function_decl)))
13155 /* Declare simd is a 'maybe' case: it is supposed to be added only to
13156 the clones created by omp-simd-clone.cc, not to the base function. */
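/* Sketch: a function marked '#pragma omp declare simd' gets simd
   clones from omp-simd-clone.cc; only in those clones does the
   context behave as if an innermost 'simd' construct were present,
   hence the second score set described in the function comment.  */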
13157 declare_simd_cnt = cnt++;
13158 if (scores)
13159 codes.safe_push (OMP_SIMD);
13160 else if (cnt == 0
13161 && constructs[0] == OMP_SIMD)
13163 gcc_assert (matched == 0);
13164 simd_seen = true;
13165 if (++matched == nconstructs)
13166 return -1;
13169 if (tree attr = lookup_attribute ("omp declare variant variant",
13170 DECL_ATTRIBUTES (current_function_decl)))
13172 enum tree_code variant_constructs[5];
13173 int variant_nconstructs = 0;
13174 if (!target_seen)
13175 variant_nconstructs
13176 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13177 variant_constructs);
13178 for (int i = 0; i < variant_nconstructs; i++)
13180 ++cnt;
13181 if (scores)
13182 codes.safe_push (variant_constructs[i]);
13183 else if (matched < nconstructs
13184 && variant_constructs[i] == constructs[matched])
13186 if (variant_constructs[i] == OMP_SIMD)
13188 if (matched)
13189 return 0;
13190 simd_seen = true;
13192 ++matched;
13196 if (!target_seen
13197 && lookup_attribute ("omp declare target block",
13198 DECL_ATTRIBUTES (current_function_decl)))
13200 if (scores)
13201 codes.safe_push (OMP_TARGET);
13202 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13203 ++matched;
13205 if (scores)
13207 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13209 int j = codes.length () - 1;
13210 for (int i = nconstructs - 1; i >= 0; i--)
13212 while (j >= 0
13213 && (pass != 0 || declare_simd_cnt != j)
13214 && constructs[i] != codes[j])
13215 --j;
13216 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13217 *scores++ = j - 1;
13218 else
13219 *scores++ = j;
13221 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13222 ? codes.length () - 1 : codes.length ());
13224 return declare_simd_cnt == -1 ? 1 : 2;
13226 if (matched == nconstructs)
13227 return simd_seen ? -1 : 1;
13228 return 0;
13231 /* Gimplify OACC_CACHE. */
13233 static void
13234 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13236 tree expr = *expr_p;
13238 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13239 OACC_CACHE);
13240 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13241 OACC_CACHE);
13243 /* TODO: Do something sensible with this information. */
13245 *expr_p = NULL_TREE;
13248 /* Helper function of gimplify_oacc_declare. If required, it translates
13249 the 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind. The
13250 entry kind replaces the one in CLAUSE, while the exit kind is used
13251 in a new omp_clause that is returned to the caller. */
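/* Hedged sketch of the translation below: an 'acc declare copy(x)'
   clause arrives as GOMP_MAP_TOFROM and is split into GOMP_MAP_TO
   for the entry and a newly built GOMP_MAP_FROM clause returned for
   the exit.  */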
13253 static tree
13254 gimplify_oacc_declare_1 (tree clause)
13256 HOST_WIDE_INT kind, new_op;
13257 bool ret = false;
13258 tree c = NULL;
13260 kind = OMP_CLAUSE_MAP_KIND (clause);
13262 switch (kind)
13264 case GOMP_MAP_ALLOC:
13265 new_op = GOMP_MAP_RELEASE;
13266 ret = true;
13267 break;
13269 case GOMP_MAP_FROM:
13270 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13271 new_op = GOMP_MAP_FROM;
13272 ret = true;
13273 break;
13275 case GOMP_MAP_TOFROM:
13276 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13277 new_op = GOMP_MAP_FROM;
13278 ret = true;
13279 break;
13281 case GOMP_MAP_DEVICE_RESIDENT:
13282 case GOMP_MAP_FORCE_DEVICEPTR:
13283 case GOMP_MAP_FORCE_PRESENT:
13284 case GOMP_MAP_LINK:
13285 case GOMP_MAP_POINTER:
13286 case GOMP_MAP_TO:
13287 break;
13289 default:
13290 gcc_unreachable ();
13291 break;
13294 if (ret)
13296 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13297 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13298 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13301 return c;
13304 /* Gimplify OACC_DECLARE. */
13306 static void
13307 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13309 tree expr = *expr_p;
13310 gomp_target *stmt;
13311 tree clauses, t, decl;
13313 clauses = OACC_DECLARE_CLAUSES (expr);
13315 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13316 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13318 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13320 decl = OMP_CLAUSE_DECL (t);
13322 if (TREE_CODE (decl) == MEM_REF)
13323 decl = TREE_OPERAND (decl, 0);
13325 if (VAR_P (decl) && !is_oacc_declared (decl))
13327 tree attr = get_identifier ("oacc declare target");
13328 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13329 DECL_ATTRIBUTES (decl));
13332 if (VAR_P (decl)
13333 && !is_global_var (decl)
13334 && DECL_CONTEXT (decl) == current_function_decl)
13336 tree c = gimplify_oacc_declare_1 (t);
13337 if (c)
13339 if (oacc_declare_returns == NULL)
13340 oacc_declare_returns = new hash_map<tree, tree>;
13342 oacc_declare_returns->put (decl, c);
13346 if (gimplify_omp_ctxp)
13347 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13350 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13351 clauses);
13353 gimplify_seq_add_stmt (pre_p, stmt);
13355 *expr_p = NULL_TREE;
13358 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13359 gimplification of the body, as well as scanning the body for used
13360 variables. We need to do this scan now, because variable-sized
13361 decls will be decomposed during gimplification. */
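/* Sketch: '#pragma omp parallel' with a compound-statement body ends
   up as a single GIMPLE_OMP_PARALLEL statement whose body is the
   gimplified GIMPLE_BIND, with the (adjusted) clause list attached.  */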
13363 static void
13364 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13366 tree expr = *expr_p;
13367 gimple *g;
13368 gimple_seq body = NULL;
13370 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13371 OMP_PARALLEL_COMBINED (expr)
13372 ? ORT_COMBINED_PARALLEL
13373 : ORT_PARALLEL, OMP_PARALLEL);
13375 push_gimplify_context ();
13377 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13378 if (gimple_code (g) == GIMPLE_BIND)
13379 pop_gimplify_context (g);
13380 else
13381 pop_gimplify_context (NULL);
13383 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13384 OMP_PARALLEL);
13386 g = gimple_build_omp_parallel (body,
13387 OMP_PARALLEL_CLAUSES (expr),
13388 NULL_TREE, NULL_TREE);
13389 if (OMP_PARALLEL_COMBINED (expr))
13390 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13391 gimplify_seq_add_stmt (pre_p, g);
13392 *expr_p = NULL_TREE;
13395 /* Gimplify the contents of an OMP_TASK statement. This involves
13396 gimplification of the body, as well as scanning the body for used
13397 variables. We need to do this scan now, because variable-sized
13398 decls will be decomposed during gimplification. */
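/* Note the special case below: '#pragma omp taskwait depend(in: x)'
   arrives as an OMP_TASK with a NULL body and is emitted as a
   GIMPLE_OMP_TASK with its taskwait_p flag set.  */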
13400 static void
13401 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13403 tree expr = *expr_p;
13404 gimple *g;
13405 gimple_seq body = NULL;
13406 bool nowait = false;
13407 bool has_depend = false;
13409 if (OMP_TASK_BODY (expr) == NULL_TREE)
13411 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13412 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13414 has_depend = true;
13415 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13417 error_at (OMP_CLAUSE_LOCATION (c),
13418 "%<mutexinoutset%> kind in %<depend%> clause on a "
13419 "%<taskwait%> construct");
13420 break;
13423 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13424 nowait = true;
13425 if (nowait && !has_depend)
13427 error_at (EXPR_LOCATION (expr),
13428 "%<taskwait%> construct with %<nowait%> clause but no "
13429 "%<depend%> clauses");
13430 *expr_p = NULL_TREE;
13431 return;
13435 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13436 omp_find_clause (OMP_TASK_CLAUSES (expr),
13437 OMP_CLAUSE_UNTIED)
13438 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13440 if (OMP_TASK_BODY (expr))
13442 push_gimplify_context ();
13444 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13445 if (gimple_code (g) == GIMPLE_BIND)
13446 pop_gimplify_context (g);
13447 else
13448 pop_gimplify_context (NULL);
13451 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13452 OMP_TASK);
13454 g = gimple_build_omp_task (body,
13455 OMP_TASK_CLAUSES (expr),
13456 NULL_TREE, NULL_TREE,
13457 NULL_TREE, NULL_TREE, NULL_TREE);
13458 if (OMP_TASK_BODY (expr) == NULL_TREE)
13459 gimple_omp_task_set_taskwait_p (g, true);
13460 gimplify_seq_add_stmt (pre_p, g);
13461 *expr_p = NULL_TREE;
13464 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13465 force it into a temporary initialized in PRE_P and add a firstprivate
13466 clause for it to ORIG_FOR_STMT. */
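/* Hedged sketch: for '#pragma omp taskloop' with a bound like 'n * 2',
   the expression is evaluated into a temporary (say D.1234, a
   hypothetical name) ahead of the construct, and
   'firstprivate(D.1234)' is prepended to ORIG_FOR_STMT's clauses.  */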
13468 static void
13469 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13470 tree orig_for_stmt)
13472 if (*tp == NULL || is_gimple_constant (*tp))
13473 return;
13475 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13476 /* A reference-to-pointer conversion is considered useless, but it
13477 is significant for the firstprivate clause. Force it
13478 here. */
13479 if (type
13480 && TREE_CODE (type) == POINTER_TYPE
13481 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13483 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13484 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13485 gimplify_and_add (m, pre_p);
13486 *tp = v;
13489 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13490 OMP_CLAUSE_DECL (c) = *tp;
13491 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13492 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13495 /* Helper function of gimplify_omp_for: find an OMP_ORDERED with a
13496 null OMP_ORDERED_BODY inside OMP_FOR's body. */
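/* A stand-alone ordered construct is e.g.
     #pragma omp ordered depend(sink: i - 1)
   which has a null body, unlike the block form
     #pragma omp ordered { ... }  */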
13498 static tree
13499 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13501 switch (TREE_CODE (*tp))
13503 case OMP_ORDERED:
13504 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13505 return *tp;
13506 break;
13507 case OMP_SIMD:
13508 case OMP_PARALLEL:
13509 case OMP_TARGET:
13510 *walk_subtrees = 0;
13511 break;
13512 default:
13513 break;
13515 return NULL_TREE;
13518 /* Gimplify the gross structure of an OMP_FOR statement. */
13520 static enum gimplify_status
13521 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13523 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13524 enum gimplify_status ret = GS_ALL_DONE;
13525 enum gimplify_status tret;
13526 gomp_for *gfor;
13527 gimple_seq for_body, for_pre_body;
13528 int i;
13529 bitmap has_decl_expr = NULL;
13530 enum omp_region_type ort = ORT_WORKSHARE;
13531 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13533 orig_for_stmt = for_stmt = *expr_p;
13535 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13536 != NULL_TREE);
13537 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13539 tree *data[4] = { NULL, NULL, NULL, NULL };
13540 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13541 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13542 find_combined_omp_for, data, NULL);
13543 if (inner_for_stmt == NULL_TREE)
13545 gcc_assert (seen_error ());
13546 *expr_p = NULL_TREE;
13547 return GS_ERROR;
13549 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13551 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13552 &OMP_FOR_PRE_BODY (for_stmt));
13553 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13555 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13557 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13558 &OMP_FOR_PRE_BODY (for_stmt));
13559 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13562 if (data[0])
13564 /* We have some statements or variable declarations in between
13565 the composite construct directives. Move them around the
13566 inner_for_stmt. */
13567 data[0] = expr_p;
13568 for (i = 0; i < 3; i++)
13569 if (data[i])
13571 tree t = *data[i];
13572 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13573 data[i + 1] = data[i];
13574 *data[i] = OMP_BODY (t);
13575 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13576 NULL_TREE, make_node (BLOCK));
13577 OMP_BODY (t) = body;
13578 append_to_statement_list_force (inner_for_stmt,
13579 &BIND_EXPR_BODY (body));
13580 *data[3] = t;
13581 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13582 gcc_assert (*data[3] == inner_for_stmt);
13584 return GS_OK;
13587 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13588 if (!loop_p
13589 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13590 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13591 i)) == TREE_LIST
13592 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13593 i)))
13595 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13596 /* Class iterators aren't allowed on OMP_SIMD, so the only
13597 case we need to solve is distribute parallel for. They are
13598 allowed on the loop construct, but that is already handled
13599 in gimplify_omp_loop. */
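/* Hedged illustration: '#pragma omp distribute parallel for' over a
   C++ class iterator, where the front end placed the
   private/lastprivate clause for the original iterator object on the
   inner for; the code below moves or mirrors it onto the enclosing
   constructs.  */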
13600 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13601 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13602 && data[1]);
13603 tree orig_decl = TREE_PURPOSE (orig);
13604 tree last = TREE_VALUE (orig);
13605 tree *pc;
13606 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13607 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13608 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13609 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13610 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13611 break;
13612 if (*pc == NULL_TREE)
13614 tree *spc;
13615 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13616 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13617 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13618 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13619 break;
13620 if (*spc)
13622 tree c = *spc;
13623 *spc = OMP_CLAUSE_CHAIN (c);
13624 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13625 *pc = c;
13628 if (*pc == NULL_TREE)
13630 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13632 /* A private clause will appear only on inner_for_stmt.
13633 Change it into firstprivate, and add a private clause
13634 on for_stmt. */
13635 tree c = copy_node (*pc);
13636 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13637 OMP_FOR_CLAUSES (for_stmt) = c;
13638 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13639 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13641 else
13643 /* A lastprivate clause will appear on both inner_for_stmt
13644 and for_stmt. Add a firstprivate clause to
13645 inner_for_stmt. */
13646 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13647 OMP_CLAUSE_FIRSTPRIVATE);
13648 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13649 OMP_CLAUSE_CHAIN (c) = *pc;
13650 *pc = c;
13651 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13653 tree c = build_omp_clause (UNKNOWN_LOCATION,
13654 OMP_CLAUSE_FIRSTPRIVATE);
13655 OMP_CLAUSE_DECL (c) = last;
13656 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13657 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13658 c = build_omp_clause (UNKNOWN_LOCATION,
13659 *pc ? OMP_CLAUSE_SHARED
13660 : OMP_CLAUSE_FIRSTPRIVATE);
13661 OMP_CLAUSE_DECL (c) = orig_decl;
13662 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13663 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13665 /* Similarly, take care of C++ range-for temporaries; those should
13666 be firstprivate on the OMP_PARALLEL, if any. */
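/* Sketch: for 'for (auto x : cont)' inside a combined parallel
   construct, the front end's __for_end (and, if present, __for_range)
   temporaries each get a firstprivate clause added to the
   OMP_PARALLEL clause list below.  */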
13667 if (data[1])
13668 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13669 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13670 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13671 i)) == TREE_LIST
13672 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13673 i)))
13675 tree orig
13676 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13677 tree v = TREE_CHAIN (orig);
13678 tree c = build_omp_clause (UNKNOWN_LOCATION,
13679 OMP_CLAUSE_FIRSTPRIVATE);
13680 /* First add a firstprivate clause for the __for_end artificial
13681 decl. */
13682 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13683 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13684 == REFERENCE_TYPE)
13685 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13686 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13687 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13688 if (TREE_VEC_ELT (v, 0))
13690 /* And now the same for the __for_range artificial decl if it
13691 exists. */
13692 c = build_omp_clause (UNKNOWN_LOCATION,
13693 OMP_CLAUSE_FIRSTPRIVATE);
13694 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13695 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13696 == REFERENCE_TYPE)
13697 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13698 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13699 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13704 switch (TREE_CODE (for_stmt))
13706 case OMP_FOR:
13707 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13709 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13710 OMP_CLAUSE_SCHEDULE))
13711 error_at (EXPR_LOCATION (for_stmt),
13712 "%qs clause may not appear on non-rectangular %qs",
13713 "schedule", lang_GNU_Fortran () ? "do" : "for");
13714 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13715 error_at (EXPR_LOCATION (for_stmt),
13716 "%qs clause may not appear on non-rectangular %qs",
13717 "ordered", lang_GNU_Fortran () ? "do" : "for");
13719 break;
13720 case OMP_DISTRIBUTE:
13721 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13722 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13723 OMP_CLAUSE_DIST_SCHEDULE))
13724 error_at (EXPR_LOCATION (for_stmt),
13725 "%qs clause may not appear on non-rectangular %qs",
13726 "dist_schedule", "distribute");
13727 break;
13728 case OACC_LOOP:
13729 ort = ORT_ACC;
13730 break;
13731 case OMP_TASKLOOP:
13732 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13734 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13735 OMP_CLAUSE_GRAINSIZE))
13736 error_at (EXPR_LOCATION (for_stmt),
13737 "%qs clause may not appear on non-rectangular %qs",
13738 "grainsize", "taskloop");
13739 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13740 OMP_CLAUSE_NUM_TASKS))
13741 error_at (EXPR_LOCATION (for_stmt),
13742 "%qs clause may not appear on non-rectangular %qs",
13743 "num_tasks", "taskloop");
13745 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13746 ort = ORT_UNTIED_TASKLOOP;
13747 else
13748 ort = ORT_TASKLOOP;
13749 break;
13750 case OMP_SIMD:
13751 ort = ORT_SIMD;
13752 break;
13753 default:
13754 gcc_unreachable ();
13757 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
13758 clause for the IV. */
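/* Hedged example: for '#pragma omp simd linear(i)' where 'i' is the
   sole loop iterator, copying the initial value in is pointless
   because the construct itself assigns 'i', so NO_COPYIN is set.  */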
13759 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13761 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13762 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13763 decl = TREE_OPERAND (t, 0);
13764 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13766 && OMP_CLAUSE_DECL (c) == decl)
13768 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13769 break;
13773 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13774 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13775 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13776 ? OMP_LOOP : TREE_CODE (for_stmt));
13778 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13779 gimplify_omp_ctxp->distribute = true;
13781 /* Handle OMP_FOR_INIT. */
13782 for_pre_body = NULL;
13783 if ((ort == ORT_SIMD
13784 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13785 && OMP_FOR_PRE_BODY (for_stmt))
13787 has_decl_expr = BITMAP_ALLOC (NULL);
13788 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13789 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
13790 == VAR_DECL)
13792 t = OMP_FOR_PRE_BODY (for_stmt);
13793 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13795 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13797 tree_stmt_iterator si;
13798 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13799 tsi_next (&si))
13801 t = tsi_stmt (si);
13802 if (TREE_CODE (t) == DECL_EXPR
13803 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
13804 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13808 if (OMP_FOR_PRE_BODY (for_stmt))
13810 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
13811 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13812 else
13814 struct gimplify_omp_ctx ctx;
13815 memset (&ctx, 0, sizeof (ctx));
13816 ctx.region_type = ORT_NONE;
13817 gimplify_omp_ctxp = &ctx;
13818 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13819 gimplify_omp_ctxp = NULL;
13822 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
13824 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13825 for_stmt = inner_for_stmt;
13827 /* For taskloop, need to gimplify the start, end and step before the
13828 taskloop, outside of the taskloop omp context. */
13829 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13831 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13833 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13834 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
13835 ? pre_p : &for_pre_body);
13836 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
13837 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13839 tree v = TREE_OPERAND (t, 1);
13840 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13841 for_pre_p, orig_for_stmt);
13842 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13843 for_pre_p, orig_for_stmt);
13845 else
13846 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13847 orig_for_stmt);
13849 /* Handle OMP_FOR_COND. */
13850 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13851 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13853 tree v = TREE_OPERAND (t, 1);
13854 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13855 for_pre_p, orig_for_stmt);
13856 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13857 for_pre_p, orig_for_stmt);
13859 else
13860 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13861 orig_for_stmt);
13863 /* Handle OMP_FOR_INCR. */
13864 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13865 if (TREE_CODE (t) == MODIFY_EXPR)
13867 decl = TREE_OPERAND (t, 0);
13868 t = TREE_OPERAND (t, 1);
13869 tree *tp = &TREE_OPERAND (t, 1);
13870 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
13871 tp = &TREE_OPERAND (t, 0);
13873 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
13874 orig_for_stmt);
13878 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
13879 OMP_TASKLOOP);
13882 if (orig_for_stmt != for_stmt)
13883 gimplify_omp_ctxp->combined_loop = true;
13885 for_body = NULL;
13886 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13887 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
13888 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13889 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
13891 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
13892 bool is_doacross = false;
13893 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
13894 find_standalone_omp_ordered, NULL))
13896 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
13897 is_doacross = true;
13898 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
13899 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
13900 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
13901 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
13903 error_at (OMP_CLAUSE_LOCATION (*pc),
13904 "%<linear%> clause may not be specified together "
13905 "with %<ordered%> clause if stand-alone %<ordered%> "
13906 "construct is nested in it");
13907 *pc = OMP_CLAUSE_CHAIN (*pc);
13909 else
13910 pc = &OMP_CLAUSE_CHAIN (*pc);
13912 int collapse = 1, tile = 0;
13913 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
13914 if (c)
13915 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
13916 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
13917 if (c)
13918 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
13919 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
13920 hash_set<tree> *allocate_uids = NULL;
13921 if (c)
13923 allocate_uids = new hash_set<tree>;
13924 for (; c; c = OMP_CLAUSE_CHAIN (c))
13925 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
13926 allocate_uids->add (OMP_CLAUSE_DECL (c));
13928 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13930 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13931 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13932 decl = TREE_OPERAND (t, 0);
13933 gcc_assert (DECL_P (decl));
13934 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
13935 || POINTER_TYPE_P (TREE_TYPE (decl)));
13936 if (is_doacross)
13938 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
13940 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13941 if (TREE_CODE (orig_decl) == TREE_LIST)
13943 orig_decl = TREE_PURPOSE (orig_decl);
13944 if (!orig_decl)
13945 orig_decl = decl;
13947 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
13949 else
13950 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13951 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13954 if (for_stmt == orig_for_stmt)
13956 tree orig_decl = decl;
13957 if (OMP_FOR_ORIG_DECLS (for_stmt))
13959 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13960 if (TREE_CODE (orig_decl) == TREE_LIST)
13962 orig_decl = TREE_PURPOSE (orig_decl);
13963 if (!orig_decl)
13964 orig_decl = decl;
13967 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
13968 error_at (EXPR_LOCATION (for_stmt),
13969 "threadprivate iteration variable %qD", orig_decl);
13972 /* Make sure the iteration variable is private. */
13973 tree c = NULL_TREE;
13974 tree c2 = NULL_TREE;
13975 if (orig_for_stmt != for_stmt)
13977 /* Preserve this information until we gimplify the inner simd. */
13978 if (has_decl_expr
13979 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
13980 TREE_PRIVATE (t) = 1;
13982 else if (ort == ORT_SIMD)
13984 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
13985 (splay_tree_key) decl);
13986 omp_is_private (gimplify_omp_ctxp, decl,
13987 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13988 != 1));
13989 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
13991 omp_notice_variable (gimplify_omp_ctxp, decl, true);
13992 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
13993 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13994 OMP_CLAUSE_LASTPRIVATE);
13995 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
13996 OMP_CLAUSE_LASTPRIVATE))
13997 if (OMP_CLAUSE_DECL (c3) == decl)
13999 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14000 "conditional %<lastprivate%> on loop "
14001 "iterator %qD ignored", decl);
14002 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14003 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14006 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14008 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14009 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14010 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14011 if ((has_decl_expr
14012 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14013 || TREE_PRIVATE (t))
14015 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14016 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14018 struct gimplify_omp_ctx *outer
14019 = gimplify_omp_ctxp->outer_context;
14020 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14022 if (outer->region_type == ORT_WORKSHARE
14023 && outer->combined_loop)
14025 n = splay_tree_lookup (outer->variables,
14026 (splay_tree_key)decl);
14027 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14029 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14030 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14032 else
14034 struct gimplify_omp_ctx *octx = outer->outer_context;
14035 if (octx
14036 && octx->region_type == ORT_COMBINED_PARALLEL
14037 && octx->outer_context
14038 && (octx->outer_context->region_type
14039 == ORT_WORKSHARE)
14040 && octx->outer_context->combined_loop)
14042 octx = octx->outer_context;
14043 n = splay_tree_lookup (octx->variables,
14044 (splay_tree_key)decl);
14045 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14047 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14048 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14055 OMP_CLAUSE_DECL (c) = decl;
14056 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14057 OMP_FOR_CLAUSES (for_stmt) = c;
14058 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14059 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14060 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14061 true);
14063 else
14065 bool lastprivate
14066 = (!has_decl_expr
14067 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14068 if (TREE_PRIVATE (t))
14069 lastprivate = false;
14070 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14072 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14073 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14074 lastprivate = false;
14077 struct gimplify_omp_ctx *outer
14078 = gimplify_omp_ctxp->outer_context;
14079 if (outer && lastprivate)
14080 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14081 true);
14083 c = build_omp_clause (input_location,
14084 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14085 : OMP_CLAUSE_PRIVATE);
14086 OMP_CLAUSE_DECL (c) = decl;
14087 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14088 OMP_FOR_CLAUSES (for_stmt) = c;
14089 omp_add_variable (gimplify_omp_ctxp, decl,
14090 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14091 | GOVD_EXPLICIT | GOVD_SEEN);
14092 c = NULL_TREE;
14095 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14097 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14098 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14099 (splay_tree_key) decl);
14100 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14101 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14102 OMP_CLAUSE_LASTPRIVATE);
14103 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14104 OMP_CLAUSE_LASTPRIVATE))
14105 if (OMP_CLAUSE_DECL (c3) == decl)
14107 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14108 "conditional %<lastprivate%> on loop "
14109 "iterator %qD ignored", decl);
14110 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14111 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14114 else
14115 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14117 /* If DECL is not a gimple register, create a temporary variable to act
14118 as an iteration counter. This is valid, since DECL cannot be
14119 modified in the body of the loop. Similarly for any iteration vars
14120 in simd with collapse > 1 where the iterator vars must be
14121 lastprivate. And similarly for vars mentioned in allocate clauses. */
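/* Sketch: if 'i' is addressable (not a gimple register), a fresh
   temporary (hypothetically 'i.0') becomes the loop index and the
   body starts with 'i = i.0;', added to FOR_BODY just below.  */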
14122 if (orig_for_stmt != for_stmt)
14123 var = decl;
14124 else if (!is_gimple_reg (decl)
14125 || (ort == ORT_SIMD
14126 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14127 || (allocate_uids && allocate_uids->contains (decl)))
14129 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14130 /* Make sure omp_add_variable is not called on it prematurely.
14131 We call it ourselves a few lines later. */
14132 gimplify_omp_ctxp = NULL;
14133 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14134 gimplify_omp_ctxp = ctx;
14135 TREE_OPERAND (t, 0) = var;
14137 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14139 if (ort == ORT_SIMD
14140 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14142 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14143 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14144 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14145 OMP_CLAUSE_DECL (c2) = var;
14146 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14147 OMP_FOR_CLAUSES (for_stmt) = c2;
14148 omp_add_variable (gimplify_omp_ctxp, var,
14149 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14150 if (c == NULL_TREE)
14152 c = c2;
14153 c2 = NULL_TREE;
14156 else
14157 omp_add_variable (gimplify_omp_ctxp, var,
14158 GOVD_PRIVATE | GOVD_SEEN);
14160 else
14161 var = decl;
14163 gimplify_omp_ctxp->in_for_exprs = true;
14164 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14166 tree lb = TREE_OPERAND (t, 1);
14167 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14168 is_gimple_val, fb_rvalue, false);
14169 ret = MIN (ret, tret);
14170 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14171 is_gimple_val, fb_rvalue, false);
14173 else
14174 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14175 is_gimple_val, fb_rvalue, false);
14176 gimplify_omp_ctxp->in_for_exprs = false;
14177 ret = MIN (ret, tret);
14178 if (ret == GS_ERROR)
14179 return ret;
14181 /* Handle OMP_FOR_COND. */
14182 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14183 gcc_assert (COMPARISON_CLASS_P (t));
14184 gcc_assert (TREE_OPERAND (t, 0) == decl);
14186 gimplify_omp_ctxp->in_for_exprs = true;
14187 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14189 tree ub = TREE_OPERAND (t, 1);
14190 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14191 is_gimple_val, fb_rvalue, false);
14192 ret = MIN (ret, tret);
14193 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14194 is_gimple_val, fb_rvalue, false);
14196 else
14197 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14198 is_gimple_val, fb_rvalue, false);
14199 gimplify_omp_ctxp->in_for_exprs = false;
14200 ret = MIN (ret, tret);
14202 /* Handle OMP_FOR_INCR. */
14203 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14204 switch (TREE_CODE (t))
14206 case PREINCREMENT_EXPR:
14207 case POSTINCREMENT_EXPR:
14209 tree decl = TREE_OPERAND (t, 0);
14210 /* c_omp_for_incr_canonicalize_ptr() should have been
14211 called to massage things appropriately. */
14212 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14214 if (orig_for_stmt != for_stmt)
14215 break;
14216 t = build_int_cst (TREE_TYPE (decl), 1);
14217 if (c)
14218 OMP_CLAUSE_LINEAR_STEP (c) = t;
14219 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14220 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14221 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14222 break;
14225 case PREDECREMENT_EXPR:
14226 case POSTDECREMENT_EXPR:
14227 /* c_omp_for_incr_canonicalize_ptr() should have been
14228 called to massage things appropriately. */
14229 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14230 if (orig_for_stmt != for_stmt)
14231 break;
14232 t = build_int_cst (TREE_TYPE (decl), -1);
14233 if (c)
14234 OMP_CLAUSE_LINEAR_STEP (c) = t;
14235 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14236 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14237 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14238 break;
14240 case MODIFY_EXPR:
14241 gcc_assert (TREE_OPERAND (t, 0) == decl);
14242 TREE_OPERAND (t, 0) = var;
14244 t = TREE_OPERAND (t, 1);
14245 switch (TREE_CODE (t))
14247 case PLUS_EXPR:
14248 if (TREE_OPERAND (t, 1) == decl)
14250 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14251 TREE_OPERAND (t, 0) = var;
14252 break;
14255 /* Fallthru. */
14256 case MINUS_EXPR:
14257 case POINTER_PLUS_EXPR:
14258 gcc_assert (TREE_OPERAND (t, 0) == decl);
14259 TREE_OPERAND (t, 0) = var;
14260 break;
14261 default:
14262 gcc_unreachable ();
14265 gimplify_omp_ctxp->in_for_exprs = true;
14266 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14267 is_gimple_val, fb_rvalue, false);
14268 ret = MIN (ret, tret);
14269 if (c)
14271 tree step = TREE_OPERAND (t, 1);
14272 tree stept = TREE_TYPE (decl);
14273 if (POINTER_TYPE_P (stept))
14274 stept = sizetype;
14275 step = fold_convert (stept, step);
14276 if (TREE_CODE (t) == MINUS_EXPR)
14277 step = fold_build1 (NEGATE_EXPR, stept, step);
14278 OMP_CLAUSE_LINEAR_STEP (c) = step;
14279 if (step != TREE_OPERAND (t, 1))
14281 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14282 &for_pre_body, NULL,
14283 is_gimple_val, fb_rvalue, false);
14284 ret = MIN (ret, tret);
14287 gimplify_omp_ctxp->in_for_exprs = false;
14288 break;
14290 default:
14291 gcc_unreachable ();
14294 if (c2)
14296 gcc_assert (c);
14297 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14300 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14302 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14303 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14304 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14305 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14306 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14307 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14308 && OMP_CLAUSE_DECL (c) == decl)
14310 if (is_doacross && (collapse == 1 || i >= collapse))
14311 t = var;
14312 else
14314 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14315 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14316 gcc_assert (TREE_OPERAND (t, 0) == var);
14317 t = TREE_OPERAND (t, 1);
14318 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14319 || TREE_CODE (t) == MINUS_EXPR
14320 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14321 gcc_assert (TREE_OPERAND (t, 0) == var);
14322 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14323 is_doacross ? var : decl,
14324 TREE_OPERAND (t, 1));
14326 gimple_seq *seq;
14327 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14328 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14329 else
14330 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14331 push_gimplify_context ();
14332 gimplify_assign (decl, t, seq);
14333 gimple *bind = NULL;
14334 if (gimplify_ctxp->temps)
14336 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14337 *seq = NULL;
14338 gimplify_seq_add_stmt (seq, bind);
14340 pop_gimplify_context (bind);
14343 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14344 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14346 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14347 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14348 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14349 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14350 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14351 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14352 gcc_assert (COMPARISON_CLASS_P (t));
14353 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14354 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14355 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14359 BITMAP_FREE (has_decl_expr);
14360 delete allocate_uids;
14362 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14363 || (loop_p && orig_for_stmt == for_stmt))
14365 push_gimplify_context ();
14366 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14368 OMP_FOR_BODY (orig_for_stmt)
14369 = build3 (BIND_EXPR, void_type_node, NULL,
14370 OMP_FOR_BODY (orig_for_stmt), NULL);
14371 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14375 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14376 &for_body);
14378 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14379 || (loop_p && orig_for_stmt == for_stmt))
14381 if (gimple_code (g) == GIMPLE_BIND)
14382 pop_gimplify_context (g);
14383 else
14384 pop_gimplify_context (NULL);
14387 if (orig_for_stmt != for_stmt)
14388 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14390 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14391 decl = TREE_OPERAND (t, 0);
14392 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14393 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14394 gimplify_omp_ctxp = ctx->outer_context;
14395 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14396 gimplify_omp_ctxp = ctx;
14397 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14398 TREE_OPERAND (t, 0) = var;
14399 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14400 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14401 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14402 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14403 for (int j = i + 1;
14404 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14406 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14407 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14408 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14409 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14411 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14412 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14414 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14415 gcc_assert (COMPARISON_CLASS_P (t));
14416 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14417 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14419 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14420 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14425 gimplify_adjust_omp_clauses (pre_p, for_body,
14426 &OMP_FOR_CLAUSES (orig_for_stmt),
14427 TREE_CODE (orig_for_stmt));
14429 int kind;
14430 switch (TREE_CODE (orig_for_stmt))
14432 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14433 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14434 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14435 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14436 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14437 default:
14438 gcc_unreachable ();
14440 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14442 gimplify_seq_add_seq (pre_p, for_pre_body);
14443 for_pre_body = NULL;
14445 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14446 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14447 for_pre_body);
14448 if (orig_for_stmt != for_stmt)
14449 gimple_omp_for_set_combined_p (gfor, true);
14450 if (gimplify_omp_ctxp
14451 && (gimplify_omp_ctxp->combined_loop
14452 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14453 && gimplify_omp_ctxp->outer_context
14454 && gimplify_omp_ctxp->outer_context->combined_loop)))
14456 gimple_omp_for_set_combined_into_p (gfor, true);
14457 if (gimplify_omp_ctxp->combined_loop)
14458 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14459 else
14460 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14463 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14465 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14466 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14467 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14468 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14469 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14470 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14471 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14472 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14475 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14476 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
14477 The outer taskloop computes the number of iterations and the
14478 counts for collapsed loops, and holds the taskloop-specific
14479 clauses. The task construct represents the effect of data
14480 sharing on the explicit task it creates, and the inner taskloop
14481 expands the static loop inside that explicit task construct. */
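/* Rough shape of the result (sketch):
     GIMPLE_OMP_FOR (KIND_TASKLOOP, outer: iteration counts, grainsize, ...)
       GIMPLE_OMP_TASK (data-sharing clauses)
         GIMPLE_OMP_FOR (KIND_TASKLOOP, inner: the loop itself)  */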
14482 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14484 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14485 tree task_clauses = NULL_TREE;
14486 tree c = *gfor_clauses_ptr;
14487 tree *gtask_clauses_ptr = &task_clauses;
14488 tree outer_for_clauses = NULL_TREE;
14489 tree *gforo_clauses_ptr = &outer_for_clauses;
14490 bitmap lastprivate_uids = NULL;
14491 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14493 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14494 if (c)
14496 lastprivate_uids = BITMAP_ALLOC (NULL);
14497 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14498 OMP_CLAUSE_LASTPRIVATE))
14499 bitmap_set_bit (lastprivate_uids,
14500 DECL_UID (OMP_CLAUSE_DECL (c)));
14502 c = *gfor_clauses_ptr;
14504 for (; c; c = OMP_CLAUSE_CHAIN (c))
14505 switch (OMP_CLAUSE_CODE (c))
14507 /* These clauses are allowed on task, so move them there. */
14508 case OMP_CLAUSE_SHARED:
14509 case OMP_CLAUSE_FIRSTPRIVATE:
14510 case OMP_CLAUSE_DEFAULT:
14511 case OMP_CLAUSE_IF:
14512 case OMP_CLAUSE_UNTIED:
14513 case OMP_CLAUSE_FINAL:
14514 case OMP_CLAUSE_MERGEABLE:
14515 case OMP_CLAUSE_PRIORITY:
14516 case OMP_CLAUSE_REDUCTION:
14517 case OMP_CLAUSE_IN_REDUCTION:
14518 *gtask_clauses_ptr = c;
14519 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14520 break;
14521 case OMP_CLAUSE_PRIVATE:
14522 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14524 /* We want private on outer for and firstprivate
14525 on task. */
14526 *gtask_clauses_ptr
14527 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14528 OMP_CLAUSE_FIRSTPRIVATE);
14529 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14530 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14531 openacc);
14532 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14533 *gforo_clauses_ptr = c;
14534 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14536 else
14538 *gtask_clauses_ptr = c;
14539 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14541 break;
14542 /* These clauses go into outer taskloop clauses. */
14543 case OMP_CLAUSE_GRAINSIZE:
14544 case OMP_CLAUSE_NUM_TASKS:
14545 case OMP_CLAUSE_NOGROUP:
14546 *gforo_clauses_ptr = c;
14547 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14548 break;
14549 /* Collapse clause we duplicate on both taskloops. */
14550 case OMP_CLAUSE_COLLAPSE:
14551 *gfor_clauses_ptr = c;
14552 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14553 *gforo_clauses_ptr = copy_node (c);
14554 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14555 break;
14556 /* For lastprivate, keep the clause on the inner taskloop and add
14557 a shared clause on the task. If the same decl is also
14558 firstprivate, also add a firstprivate clause on the inner taskloop. */
14559 case OMP_CLAUSE_LASTPRIVATE:
14560 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14562 /* For taskloop C++ lastprivate IVs, we want:
14563 1) private on outer taskloop
14564 2) firstprivate and shared on task
14565 3) lastprivate on inner taskloop */
14566 *gtask_clauses_ptr
14567 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14568 OMP_CLAUSE_FIRSTPRIVATE);
14569 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14570 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14571 openacc);
14572 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14573 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14574 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14575 OMP_CLAUSE_PRIVATE);
14576 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14577 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14578 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14579 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14581 *gfor_clauses_ptr = c;
14582 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14583 *gtask_clauses_ptr
14584 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14585 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14586 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14587 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14588 gtask_clauses_ptr
14589 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14590 break;
14591 /* An allocate clause is duplicated on the task and the inner taskloop
14592 if the decl is lastprivate; otherwise it is just put on the task. */
14593 case OMP_CLAUSE_ALLOCATE:
14594 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14595 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14597 /* Additionally, put a firstprivate clause on the task
14598 for the allocator if it is not constant. */
14599 *gtask_clauses_ptr
14600 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14601 OMP_CLAUSE_FIRSTPRIVATE);
14602 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14603 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14604 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14606 if (lastprivate_uids
14607 && bitmap_bit_p (lastprivate_uids,
14608 DECL_UID (OMP_CLAUSE_DECL (c))))
14610 *gfor_clauses_ptr = c;
14611 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14612 *gtask_clauses_ptr = copy_node (c);
14613 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14615 else
14617 *gtask_clauses_ptr = c;
14618 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14620 break;
14621 default:
14622 gcc_unreachable ();
14624 *gfor_clauses_ptr = NULL_TREE;
14625 *gtask_clauses_ptr = NULL_TREE;
14626 *gforo_clauses_ptr = NULL_TREE;
14627 BITMAP_FREE (lastprivate_uids);
14628 gimple_set_location (gfor, input_location);
14629 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14630 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14631 NULL_TREE, NULL_TREE, NULL_TREE);
14632 gimple_set_location (g, input_location);
14633 gimple_omp_task_set_taskloop_p (g, true);
14634 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14635 gomp_for *gforo
14636 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14637 gimple_omp_for_collapse (gfor),
14638 gimple_omp_for_pre_body (gfor));
14639 gimple_omp_for_set_pre_body (gfor, NULL);
14640 gimple_omp_for_set_combined_p (gforo, true);
14641 gimple_omp_for_set_combined_into_p (gfor, true);
14642 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14644 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14645 tree v = create_tmp_var (type);
14646 gimple_omp_for_set_index (gforo, i, v);
14647 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14648 gimple_omp_for_set_initial (gforo, i, t);
14649 gimple_omp_for_set_cond (gforo, i,
14650 gimple_omp_for_cond (gfor, i));
14651 t = unshare_expr (gimple_omp_for_final (gfor, i));
14652 gimple_omp_for_set_final (gforo, i, t);
14653 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14654 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14655 TREE_OPERAND (t, 0) = v;
14656 gimple_omp_for_set_incr (gforo, i, t);
14657 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14658 OMP_CLAUSE_DECL (t) = v;
14659 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14660 gimple_omp_for_set_clauses (gforo, t);
14661 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14663 tree *p1 = NULL, *p2 = NULL;
14664 t = gimple_omp_for_initial (gforo, i);
14665 if (TREE_CODE (t) == TREE_VEC)
14666 p1 = &TREE_VEC_ELT (t, 0);
14667 t = gimple_omp_for_final (gforo, i);
14668 if (TREE_CODE (t) == TREE_VEC)
14670 if (p1)
14671 p2 = &TREE_VEC_ELT (t, 0);
14672 else
14673 p1 = &TREE_VEC_ELT (t, 0);
14675 if (p1)
14677 int j;
14678 for (j = 0; j < i; j++)
14679 if (*p1 == gimple_omp_for_index (gfor, j))
14681 *p1 = gimple_omp_for_index (gforo, j);
14682 if (p2)
14683 *p2 = *p1;
14684 break;
14686 gcc_assert (j < i);
14690 gimplify_seq_add_stmt (pre_p, gforo);
14692 else
14693 gimplify_seq_add_stmt (pre_p, gfor);
14695 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14697 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14698 unsigned lastprivate_conditional = 0;
14699 while (ctx
14700 && (ctx->region_type == ORT_TARGET_DATA
14701 || ctx->region_type == ORT_TASKGROUP))
14702 ctx = ctx->outer_context;
14703 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14704 for (tree c = gimple_omp_for_clauses (gfor);
14705 c; c = OMP_CLAUSE_CHAIN (c))
14706 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14707 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14708 ++lastprivate_conditional;
14709 if (lastprivate_conditional)
14711 struct omp_for_data fd;
14712 omp_extract_for_data (gfor, &fd, NULL);
14713 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14714 lastprivate_conditional);
14715 tree var = create_tmp_var_raw (type);
14716 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14717 OMP_CLAUSE_DECL (c) = var;
14718 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14719 gimple_omp_for_set_clauses (gfor, c);
14720 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14723 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14725 unsigned lastprivate_conditional = 0;
14726 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14728 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14729 ++lastprivate_conditional;
14730 if (lastprivate_conditional)
14732 struct omp_for_data fd;
14733 omp_extract_for_data (gfor, &fd, NULL);
14734 tree type = unsigned_type_for (fd.iter_type);
14735 while (lastprivate_conditional--)
14737 tree c = build_omp_clause (UNKNOWN_LOCATION,
14738 OMP_CLAUSE__CONDTEMP_);
14739 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14740 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14741 gimple_omp_for_set_clauses (gfor, c);
14746 if (ret != GS_ALL_DONE)
14747 return GS_ERROR;
14748 *expr_p = NULL_TREE;
14749 return GS_ALL_DONE;
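/* For illustration, a rough sketch of the taskloop splitting performed
   above (the exact clause placement is decided case by case in the
   switch over the clause codes):

     #pragma omp taskloop lastprivate(x) grainsize(4)
     for (i = 0; i < n; i++) ...

   is lowered approximately as

     outer GF_OMP_FOR_KIND_TASKLOOP with grainsize(4) and private IVs
       GIMPLE_OMP_TASK with shared(x) (taskloop_p set)
         inner GF_OMP_FOR_KIND_TASKLOOP with lastprivate(x)

   with fresh temporaries created for the outer loop's iteration
   variables and the pre-body moved to the outer loop.  */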
14752 /* Helper for gimplify_omp_loop, called through walk_tree. */
14754 static tree
14755 note_no_context_vars (tree *tp, int *, void *data)
14757 if (VAR_P (*tp)
14758 && DECL_CONTEXT (*tp) == NULL_TREE
14759 && !is_global_var (*tp))
14761 vec<tree> *d = (vec<tree> *) data;
14762 d->safe_push (*tp);
14763 DECL_CONTEXT (*tp) = current_function_decl;
14765 return NULL_TREE;
14768 /* Gimplify the gross structure of an OMP_LOOP statement. */
14770 static enum gimplify_status
14771 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14773 tree for_stmt = *expr_p;
14774 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14775 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14776 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14777 int i;
14779 /* If order is not present, the behavior is as if order(concurrent)
14780 appeared. */
14781 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14782 if (order == NULL_TREE)
14784 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14785 OMP_CLAUSE_CHAIN (order) = clauses;
14786 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14789 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14790 if (bind == NULL_TREE)
14792 if (!flag_openmp) /* flag_openmp_simd */
14794 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14795 kind = OMP_CLAUSE_BIND_TEAMS;
14796 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14797 kind = OMP_CLAUSE_BIND_PARALLEL;
14798 else
14800 for (; octx; octx = octx->outer_context)
14802 if ((octx->region_type & ORT_ACC) != 0
14803 || octx->region_type == ORT_NONE
14804 || octx->region_type == ORT_IMPLICIT_TARGET)
14805 continue;
14806 break;
14808 if (octx == NULL && !in_omp_construct)
14809 error_at (EXPR_LOCATION (for_stmt),
14810 "%<bind%> clause not specified on a %<loop%> "
14811 "construct not nested inside another OpenMP construct");
14813 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
14814 OMP_CLAUSE_CHAIN (bind) = clauses;
14815 OMP_CLAUSE_BIND_KIND (bind) = kind;
14816 OMP_FOR_CLAUSES (for_stmt) = bind;
14818 else
14819 switch (OMP_CLAUSE_BIND_KIND (bind))
14821 case OMP_CLAUSE_BIND_THREAD:
14822 break;
14823 case OMP_CLAUSE_BIND_PARALLEL:
14824 if (!flag_openmp) /* flag_openmp_simd */
14826 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14827 break;
14829 for (; octx; octx = octx->outer_context)
14830 if (octx->region_type == ORT_SIMD
14831 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
14833 error_at (EXPR_LOCATION (for_stmt),
14834 "%<bind(parallel)%> on a %<loop%> construct nested "
14835 "inside %<simd%> construct");
14836 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14837 break;
14839 kind = OMP_CLAUSE_BIND_PARALLEL;
14840 break;
14841 case OMP_CLAUSE_BIND_TEAMS:
14842 if (!flag_openmp) /* flag_openmp_simd */
14844 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14845 break;
14847 if ((octx
14848 && octx->region_type != ORT_IMPLICIT_TARGET
14849 && octx->region_type != ORT_NONE
14850 && (octx->region_type & ORT_TEAMS) == 0)
14851 || in_omp_construct)
14853 error_at (EXPR_LOCATION (for_stmt),
14854 "%<bind(teams)%> on a %<loop%> region not strictly "
14855 "nested inside of a %<teams%> region");
14856 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14857 break;
14859 kind = OMP_CLAUSE_BIND_TEAMS;
14860 break;
14861 default:
14862 gcc_unreachable ();
14865 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14866 switch (OMP_CLAUSE_CODE (*pc))
14868 case OMP_CLAUSE_REDUCTION:
14869 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
14871 error_at (OMP_CLAUSE_LOCATION (*pc),
14872 "%<inscan%> %<reduction%> clause on "
14873 "%qs construct", "loop");
14874 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
14876 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
14878 error_at (OMP_CLAUSE_LOCATION (*pc),
14879 "invalid %<task%> reduction modifier on construct "
14880 "other than %<parallel%>, %qs or %<sections%>",
14881 lang_GNU_Fortran () ? "do" : "for");
14882 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
14884 pc = &OMP_CLAUSE_CHAIN (*pc);
14885 break;
14886 case OMP_CLAUSE_LASTPRIVATE:
14887 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14889 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14890 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14891 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
14892 break;
14893 if (OMP_FOR_ORIG_DECLS (for_stmt)
14894 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14895 i)) == TREE_LIST
14896 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14897 i)))
14899 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14900 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
14901 break;
14904 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
14906 error_at (OMP_CLAUSE_LOCATION (*pc),
14907 "%<lastprivate%> clause on a %<loop%> construct refers "
14908 "to a variable %qD which is not the loop iterator",
14909 OMP_CLAUSE_DECL (*pc));
14910 *pc = OMP_CLAUSE_CHAIN (*pc);
14911 break;
14913 pc = &OMP_CLAUSE_CHAIN (*pc);
14914 break;
14915 default:
14916 pc = &OMP_CLAUSE_CHAIN (*pc);
14917 break;
14920 TREE_SET_CODE (for_stmt, OMP_SIMD);
14922 int last;
14923 switch (kind)
14925 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
14926 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
14927 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
14929 for (int pass = 1; pass <= last; pass++)
14931 if (pass == 2)
14933 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
14934 make_node (BLOCK));
14935 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
14936 *expr_p = make_node (OMP_PARALLEL);
14937 TREE_TYPE (*expr_p) = void_type_node;
14938 OMP_PARALLEL_BODY (*expr_p) = bind;
14939 OMP_PARALLEL_COMBINED (*expr_p) = 1;
14940 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
14941 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
14942 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14943 if (OMP_FOR_ORIG_DECLS (for_stmt)
14944 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
14945 == TREE_LIST))
14947 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14948 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
14950 *pc = build_omp_clause (UNKNOWN_LOCATION,
14951 OMP_CLAUSE_FIRSTPRIVATE);
14952 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
14953 pc = &OMP_CLAUSE_CHAIN (*pc);
14957 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
14958 tree *pc = &OMP_FOR_CLAUSES (t);
14959 TREE_TYPE (t) = void_type_node;
14960 OMP_FOR_BODY (t) = *expr_p;
14961 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
14962 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
14963 switch (OMP_CLAUSE_CODE (c))
14965 case OMP_CLAUSE_BIND:
14966 case OMP_CLAUSE_ORDER:
14967 case OMP_CLAUSE_COLLAPSE:
14968 *pc = copy_node (c);
14969 pc = &OMP_CLAUSE_CHAIN (*pc);
14970 break;
14971 case OMP_CLAUSE_PRIVATE:
14972 case OMP_CLAUSE_FIRSTPRIVATE:
14973 /* Only needed on innermost. */
14974 break;
14975 case OMP_CLAUSE_LASTPRIVATE:
14976 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
14978 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14979 OMP_CLAUSE_FIRSTPRIVATE);
14980 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
14981 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
14982 pc = &OMP_CLAUSE_CHAIN (*pc);
14984 *pc = copy_node (c);
14985 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
14986 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
14987 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14989 if (pass != last)
14990 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
14991 else
14992 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
14993 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
14995 pc = &OMP_CLAUSE_CHAIN (*pc);
14996 break;
14997 case OMP_CLAUSE_REDUCTION:
14998 *pc = copy_node (c);
14999 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15000 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15001 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15003 auto_vec<tree> no_context_vars;
15004 int walk_subtrees = 0;
15005 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15006 &walk_subtrees, &no_context_vars);
15007 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15008 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15009 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15010 note_no_context_vars,
15011 &no_context_vars);
15012 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15013 note_no_context_vars,
15014 &no_context_vars);
15016 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15017 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15018 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15019 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15020 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15022 hash_map<tree, tree> decl_map;
15023 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15024 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15025 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15026 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15027 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15028 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15030 copy_body_data id;
15031 memset (&id, 0, sizeof (id));
15032 id.src_fn = current_function_decl;
15033 id.dst_fn = current_function_decl;
15034 id.src_cfun = cfun;
15035 id.decl_map = &decl_map;
15036 id.copy_decl = copy_decl_no_change;
15037 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15038 id.transform_new_cfg = true;
15039 id.transform_return_to_modify = false;
15040 id.eh_lp_nr = 0;
15041 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15042 &id, NULL);
15043 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15044 &id, NULL);
15046 for (tree d : no_context_vars)
15048 DECL_CONTEXT (d) = NULL_TREE;
15049 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15052 else
15054 OMP_CLAUSE_REDUCTION_INIT (*pc)
15055 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15056 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15057 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15059 pc = &OMP_CLAUSE_CHAIN (*pc);
15060 break;
15061 default:
15062 gcc_unreachable ();
15064 *pc = NULL_TREE;
15065 *expr_p = t;
15067 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
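/* A sketch of the nesting the passes above build for a loop construct
   (approximate; clauses are copied or adjusted per the switch above):

     bind(thread)   ->  simd
     bind(parallel) ->  for
                          simd
     bind(teams)    ->  distribute
                          parallel (combined, built in pass 2)
                            for
                              simd

   where the innermost simd is the original OMP_LOOP after
   TREE_SET_CODE (for_stmt, OMP_SIMD).  */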
15071 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15072 of OMP_TARGET's body. */
15074 static tree
15075 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15077 *walk_subtrees = 0;
15078 switch (TREE_CODE (*tp))
15080 case OMP_TEAMS:
15081 return *tp;
15082 case BIND_EXPR:
15083 case STATEMENT_LIST:
15084 *walk_subtrees = 1;
15085 break;
15086 default:
15087 break;
15089 return NULL_TREE;
15092 /* Helper function of optimize_target_teams, determine if the expression
15093 can be computed safely before the target construct on the host. */
15095 static tree
15096 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15098 splay_tree_node n;
15100 if (TYPE_P (*tp))
15102 *walk_subtrees = 0;
15103 return NULL_TREE;
15105 switch (TREE_CODE (*tp))
15107 case VAR_DECL:
15108 case PARM_DECL:
15109 case RESULT_DECL:
15110 *walk_subtrees = 0;
15111 if (error_operand_p (*tp)
15112 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15113 || DECL_HAS_VALUE_EXPR_P (*tp)
15114 || DECL_THREAD_LOCAL_P (*tp)
15115 || TREE_SIDE_EFFECTS (*tp)
15116 || TREE_THIS_VOLATILE (*tp))
15117 return *tp;
15118 if (is_global_var (*tp)
15119 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15120 || lookup_attribute ("omp declare target link",
15121 DECL_ATTRIBUTES (*tp))))
15122 return *tp;
15123 if (VAR_P (*tp)
15124 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15125 && !is_global_var (*tp)
15126 && decl_function_context (*tp) == current_function_decl)
15127 return *tp;
15128 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15129 (splay_tree_key) *tp);
15130 if (n == NULL)
15132 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15133 return NULL_TREE;
15134 return *tp;
15136 else if (n->value & GOVD_LOCAL)
15137 return *tp;
15138 else if (n->value & GOVD_FIRSTPRIVATE)
15139 return NULL_TREE;
15140 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15141 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15142 return NULL_TREE;
15143 return *tp;
15144 case INTEGER_CST:
15145 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15146 return *tp;
15147 return NULL_TREE;
15148 case TARGET_EXPR:
15149 if (TARGET_EXPR_INITIAL (*tp)
15150 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15151 return *tp;
15152 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15153 walk_subtrees, NULL);
15154 /* Allow some reasonable subset of integral arithmetic. */
15155 case PLUS_EXPR:
15156 case MINUS_EXPR:
15157 case MULT_EXPR:
15158 case TRUNC_DIV_EXPR:
15159 case CEIL_DIV_EXPR:
15160 case FLOOR_DIV_EXPR:
15161 case ROUND_DIV_EXPR:
15162 case TRUNC_MOD_EXPR:
15163 case CEIL_MOD_EXPR:
15164 case FLOOR_MOD_EXPR:
15165 case ROUND_MOD_EXPR:
15166 case RDIV_EXPR:
15167 case EXACT_DIV_EXPR:
15168 case MIN_EXPR:
15169 case MAX_EXPR:
15170 case LSHIFT_EXPR:
15171 case RSHIFT_EXPR:
15172 case BIT_IOR_EXPR:
15173 case BIT_XOR_EXPR:
15174 case BIT_AND_EXPR:
15175 case NEGATE_EXPR:
15176 case ABS_EXPR:
15177 case BIT_NOT_EXPR:
15178 case NON_LVALUE_EXPR:
15179 CASE_CONVERT:
15180 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15181 return *tp;
15182 return NULL_TREE;
15183 /* And disallow anything else, except for comparisons. */
15184 default:
15185 if (COMPARISON_CLASS_P (*tp))
15186 return NULL_TREE;
15187 return *tp;
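/* Illustrative examples: with 'int n' firstprivate on the target,
   'num_teams (n * 2 + 1)' can be computed on the host, while
   'num_teams (foo ())' or 'num_teams (*p)' cannot, since calls and
   dereferences fall into the default case above and are rejected.  */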
15191 /* Try to determine if the num_teams and/or thread_limit expressions
15192 can have their values determined already before entering the
15193 target construct.
15194 INTEGER_CSTs trivially can, as can
15195 integral decls that are firstprivate (explicitly or implicitly)
15196 or explicitly map(always, to:) or map(always, tofrom:) on the target
15197 region, and expressions involving simple arithmetic on those;
15198 function calls are not ok, nor is dereferencing something, etc.
15199 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15200 EXPR based on what we find:
15201 0 stands for a clause not specified at all; use the implementation
15202 default. -1 stands for a value that can't be determined easily
15203 before entering the target construct.
15204 If the teams construct is not present at all, use 1 for num_teams
15205 and 0 for thread_limit (only one team is involved, and the thread
15206 limit is implementation defined). */
15208 static void
15209 optimize_target_teams (tree target, gimple_seq *pre_p)
15211 tree body = OMP_BODY (target);
15212 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15213 tree num_teams_lower = NULL_TREE;
15214 tree num_teams_upper = integer_zero_node;
15215 tree thread_limit = integer_zero_node;
15216 location_t num_teams_loc = EXPR_LOCATION (target);
15217 location_t thread_limit_loc = EXPR_LOCATION (target);
15218 tree c, *p, expr;
15219 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15221 if (teams == NULL_TREE)
15222 num_teams_upper = integer_one_node;
15223 else
15224 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15226 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15228 p = &num_teams_upper;
15229 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15230 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15232 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15233 if (TREE_CODE (expr) == INTEGER_CST)
15234 num_teams_lower = expr;
15235 else if (walk_tree (&expr, computable_teams_clause,
15236 NULL, NULL))
15237 num_teams_lower = integer_minus_one_node;
15238 else
15240 num_teams_lower = expr;
15241 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15242 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15243 is_gimple_val, fb_rvalue, false)
15244 == GS_ERROR)
15246 gimplify_omp_ctxp = target_ctx;
15247 num_teams_lower = integer_minus_one_node;
15249 else
15251 gimplify_omp_ctxp = target_ctx;
15252 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15253 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15254 = num_teams_lower;
15259 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15261 p = &thread_limit;
15262 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15264 else
15265 continue;
15266 expr = OMP_CLAUSE_OPERAND (c, 0);
15267 if (TREE_CODE (expr) == INTEGER_CST)
15269 *p = expr;
15270 continue;
15272 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15274 *p = integer_minus_one_node;
15275 continue;
15277 *p = expr;
15278 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15279 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15280 == GS_ERROR)
15282 gimplify_omp_ctxp = target_ctx;
15283 *p = integer_minus_one_node;
15284 continue;
15286 gimplify_omp_ctxp = target_ctx;
15287 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15288 OMP_CLAUSE_OPERAND (c, 0) = *p;
15290 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15292 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15293 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15294 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15295 OMP_TARGET_CLAUSES (target) = c;
15297 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15298 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15299 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15300 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15301 OMP_TARGET_CLAUSES (target) = c;
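/* For example (a sketch of the effect):

     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(n)

   with 'n' not computable on the host results in NUM_TEAMS 4 and
   THREAD_LIMIT -1 being attached to the target's clauses; with no
   teams construct at all, NUM_TEAMS 1 and THREAD_LIMIT 0 are used.  */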
15304 /* Gimplify the gross structure of several OMP constructs. */
15306 static void
15307 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15309 tree expr = *expr_p;
15310 gimple *stmt;
15311 gimple_seq body = NULL;
15312 enum omp_region_type ort;
15314 switch (TREE_CODE (expr))
15316 case OMP_SECTIONS:
15317 case OMP_SINGLE:
15318 ort = ORT_WORKSHARE;
15319 break;
15320 case OMP_SCOPE:
15321 ort = ORT_TASKGROUP;
15322 break;
15323 case OMP_TARGET:
15324 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15325 break;
15326 case OACC_KERNELS:
15327 ort = ORT_ACC_KERNELS;
15328 break;
15329 case OACC_PARALLEL:
15330 ort = ORT_ACC_PARALLEL;
15331 break;
15332 case OACC_SERIAL:
15333 ort = ORT_ACC_SERIAL;
15334 break;
15335 case OACC_DATA:
15336 ort = ORT_ACC_DATA;
15337 break;
15338 case OMP_TARGET_DATA:
15339 ort = ORT_TARGET_DATA;
15340 break;
15341 case OMP_TEAMS:
15342 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15343 if (gimplify_omp_ctxp == NULL
15344 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15345 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15346 break;
15347 case OACC_HOST_DATA:
15348 ort = ORT_ACC_HOST_DATA;
15349 break;
15350 default:
15351 gcc_unreachable ();
15354 bool save_in_omp_construct = in_omp_construct;
15355 if ((ort & ORT_ACC) == 0)
15356 in_omp_construct = false;
15357 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15358 TREE_CODE (expr));
15359 if (TREE_CODE (expr) == OMP_TARGET)
15360 optimize_target_teams (expr, pre_p);
15361 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15362 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15364 push_gimplify_context ();
15365 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15366 if (gimple_code (g) == GIMPLE_BIND)
15367 pop_gimplify_context (g);
15368 else
15369 pop_gimplify_context (NULL);
15370 if ((ort & ORT_TARGET_DATA) != 0)
15372 enum built_in_function end_ix;
15373 switch (TREE_CODE (expr))
15375 case OACC_DATA:
15376 case OACC_HOST_DATA:
15377 end_ix = BUILT_IN_GOACC_DATA_END;
15378 break;
15379 case OMP_TARGET_DATA:
15380 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15381 break;
15382 default:
15383 gcc_unreachable ();
15385 tree fn = builtin_decl_explicit (end_ix);
15386 g = gimple_build_call (fn, 0);
15387 gimple_seq cleanup = NULL;
15388 gimple_seq_add_stmt (&cleanup, g);
15389 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15390 body = NULL;
15391 gimple_seq_add_stmt (&body, g);
15394 else
15395 gimplify_and_add (OMP_BODY (expr), &body);
15396 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15397 TREE_CODE (expr));
15398 in_omp_construct = save_in_omp_construct;
15400 switch (TREE_CODE (expr))
15402 case OACC_DATA:
15403 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15404 OMP_CLAUSES (expr));
15405 break;
15406 case OACC_HOST_DATA:
15407 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15409 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15410 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15411 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15414 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15415 OMP_CLAUSES (expr));
15416 break;
15417 case OACC_KERNELS:
15418 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15419 OMP_CLAUSES (expr));
15420 break;
15421 case OACC_PARALLEL:
15422 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15423 OMP_CLAUSES (expr));
15424 break;
15425 case OACC_SERIAL:
15426 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15427 OMP_CLAUSES (expr));
15428 break;
15429 case OMP_SECTIONS:
15430 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15431 break;
15432 case OMP_SINGLE:
15433 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15434 break;
15435 case OMP_SCOPE:
15436 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15437 break;
15438 case OMP_TARGET:
15439 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15440 OMP_CLAUSES (expr));
15441 break;
15442 case OMP_TARGET_DATA:
15443 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15444 to be evaluated before the use_device_{ptr,addr} clauses if they
15445 refer to the same variables. */
15447 tree use_device_clauses;
15448 tree *pc, *uc = &use_device_clauses;
15449 for (pc = &OMP_CLAUSES (expr); *pc; )
15450 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15451 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15453 *uc = *pc;
15454 *pc = OMP_CLAUSE_CHAIN (*pc);
15455 uc = &OMP_CLAUSE_CHAIN (*uc);
15457 else
15458 pc = &OMP_CLAUSE_CHAIN (*pc);
15459 *uc = NULL_TREE;
15460 *pc = use_device_clauses;
15461 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15462 OMP_CLAUSES (expr));
15464 break;
15465 case OMP_TEAMS:
15466 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15467 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15468 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15469 break;
15470 default:
15471 gcc_unreachable ();
15474 gimplify_seq_add_stmt (pre_p, stmt);
15475 *expr_p = NULL_TREE;
15478 /* Gimplify the gross structure of OpenACC enter/exit data and update, and
15479 OpenMP target update and target enter/exit data constructs. */
15481 static void
15482 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15484 tree expr = *expr_p;
15485 int kind;
15486 gomp_target *stmt;
15487 enum omp_region_type ort = ORT_WORKSHARE;
15489 switch (TREE_CODE (expr))
15491 case OACC_ENTER_DATA:
15492 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15493 ort = ORT_ACC;
15494 break;
15495 case OACC_EXIT_DATA:
15496 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15497 ort = ORT_ACC;
15498 break;
15499 case OACC_UPDATE:
15500 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15501 ort = ORT_ACC;
15502 break;
15503 case OMP_TARGET_UPDATE:
15504 kind = GF_OMP_TARGET_KIND_UPDATE;
15505 break;
15506 case OMP_TARGET_ENTER_DATA:
15507 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15508 break;
15509 case OMP_TARGET_EXIT_DATA:
15510 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15511 break;
15512 default:
15513 gcc_unreachable ();
15515 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15516 ort, TREE_CODE (expr));
15517 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15518 TREE_CODE (expr));
15519 if (TREE_CODE (expr) == OACC_UPDATE
15520 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15521 OMP_CLAUSE_IF_PRESENT))
15523 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15524 clause. */
15525 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15526 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15527 switch (OMP_CLAUSE_MAP_KIND (c))
15529 case GOMP_MAP_FORCE_TO:
15530 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15531 break;
15532 case GOMP_MAP_FORCE_FROM:
15533 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15534 break;
15535 default:
15536 break;
15539 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15540 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15541 OMP_CLAUSE_FINALIZE))
15543 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15544 semantics. */
15545 bool have_clause = false;
15546 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15547 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15548 switch (OMP_CLAUSE_MAP_KIND (c))
15550 case GOMP_MAP_FROM:
15551 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15552 have_clause = true;
15553 break;
15554 case GOMP_MAP_RELEASE:
15555 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15556 have_clause = true;
15557 break;
15558 case GOMP_MAP_TO_PSET:
15559 /* Fortran arrays with descriptors must map that descriptor when
15560 doing standalone "attach" operations (in OpenACC). In that
15561 case GOMP_MAP_TO_PSET appears by itself with no preceding
15562 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15563 break;
15564 case GOMP_MAP_POINTER:
15565 /* TODO PR92929: we may see these here, but they'll always follow
15566 one of the clauses above, and will be handled by libgomp as
15567 one group, so no handling required here. */
15568 gcc_assert (have_clause);
15569 break;
15570 case GOMP_MAP_DETACH:
15571 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15572 have_clause = false;
15573 break;
15574 case GOMP_MAP_STRUCT:
15575 have_clause = false;
15576 break;
15577 default:
15578 gcc_unreachable ();
15581 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15583 gimplify_seq_add_stmt (pre_p, stmt);
15584 *expr_p = NULL_TREE;
15587 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15588 stabilized the lhs of the atomic operation as *ADDR. Return true if
15589 EXPR is this stabilized form. */
15591 static bool
15592 goa_lhs_expr_p (tree expr, tree addr)
15594 /* Also include casts to other type variants. The C front end is fond
15595 of adding these for e.g. volatile variables. This is like
15596 STRIP_TYPE_NOPS but includes the main variant lookup. */
15597 STRIP_USELESS_TYPE_CONVERSION (expr);
15599 if (TREE_CODE (expr) == INDIRECT_REF)
15601 expr = TREE_OPERAND (expr, 0);
15602 while (expr != addr
15603 && (CONVERT_EXPR_P (expr)
15604 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15605 && TREE_CODE (expr) == TREE_CODE (addr)
15606 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15608 expr = TREE_OPERAND (expr, 0);
15609 addr = TREE_OPERAND (addr, 0);
15611 if (expr == addr)
15612 return true;
15613 return (TREE_CODE (addr) == ADDR_EXPR
15614 && TREE_CODE (expr) == ADDR_EXPR
15615 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15617 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15618 return true;
15619 return false;
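/* E.g. with ADDR being '&x', both 'x' itself and '*&x' (possibly
   wrapped in casts to type variants, which the C front end adds for
   volatile accesses) are recognized as the stabilized lhs form.  */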
15622 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15623 expression does not involve the lhs, evaluate it into a temporary.
15624 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15625 or -1 if an error was encountered. */
15627 static int
15628 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15629 tree lhs_var, tree &target_expr, bool rhs, int depth)
15631 tree expr = *expr_p;
15632 int saw_lhs = 0;
15634 if (goa_lhs_expr_p (expr, lhs_addr))
15636 if (pre_p)
15637 *expr_p = lhs_var;
15638 return 1;
15640 if (is_gimple_val (expr))
15641 return 0;
15643 /* The maximum depth of the lhs in an expression is reached for
15644 __builtin_clear_padding (...), __builtin_clear_padding (...),
15645 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;  */
15646 if (++depth > 7)
15647 goto finish;
15649 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15651 case tcc_binary:
15652 case tcc_comparison:
15653 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15654 lhs_var, target_expr, true, depth);
15655 /* FALLTHRU */
15656 case tcc_unary:
15657 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15658 lhs_var, target_expr, true, depth);
15659 break;
15660 case tcc_expression:
15661 switch (TREE_CODE (expr))
15663 case TRUTH_ANDIF_EXPR:
15664 case TRUTH_ORIF_EXPR:
15665 case TRUTH_AND_EXPR:
15666 case TRUTH_OR_EXPR:
15667 case TRUTH_XOR_EXPR:
15668 case BIT_INSERT_EXPR:
15669 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15670 lhs_addr, lhs_var, target_expr, true,
15671 depth);
15672 /* FALLTHRU */
15673 case TRUTH_NOT_EXPR:
15674 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15675 lhs_addr, lhs_var, target_expr, true,
15676 depth);
15677 break;
15678 case MODIFY_EXPR:
15679 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15680 target_expr, true, depth))
15681 break;
15682 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15683 lhs_addr, lhs_var, target_expr, true,
15684 depth);
15685 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15686 lhs_addr, lhs_var, target_expr, false,
15687 depth);
15688 break;
15689 /* FALLTHRU */
15690 case ADDR_EXPR:
15691 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15692 target_expr, true, depth))
15693 break;
15694 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15695 lhs_addr, lhs_var, target_expr, false,
15696 depth);
15697 break;
15698 case COMPOUND_EXPR:
15699 /* Break out any preevaluations from cp_build_modify_expr. */
15700 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15701 expr = TREE_OPERAND (expr, 1))
15703 /* Special-case __builtin_clear_padding call before
15704 __builtin_memcmp. */
15705 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15707 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15708 if (fndecl
15709 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15710 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15711 && (!pre_p
15712 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15713 lhs_addr, lhs_var,
15714 target_expr, true, depth)))
15716 if (pre_p)
15717 *expr_p = expr;
15718 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15719 pre_p, lhs_addr, lhs_var,
15720 target_expr, true, depth);
15721 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15722 pre_p, lhs_addr, lhs_var,
15723 target_expr, rhs, depth);
15724 return saw_lhs;
15728 if (pre_p)
15729 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15731 if (!pre_p)
15732 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15733 target_expr, rhs, depth);
15734 *expr_p = expr;
15735 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15736 target_expr, rhs, depth);
15737 case COND_EXPR:
15738 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15739 lhs_var, target_expr, true, depth))
15740 break;
15741 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15742 lhs_addr, lhs_var, target_expr, true,
15743 depth);
15744 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15745 lhs_addr, lhs_var, target_expr, true,
15746 depth);
15747 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15748 lhs_addr, lhs_var, target_expr, true,
15749 depth);
15750 break;
15751 case TARGET_EXPR:
15752 if (TARGET_EXPR_INITIAL (expr))
15754 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15755 lhs_var, target_expr, true,
15756 depth))
15757 break;
15758 if (expr == target_expr)
15759 saw_lhs = 1;
15760 else
15762 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15763 pre_p, lhs_addr, lhs_var,
15764 target_expr, true, depth);
15765 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15766 target_expr = expr;
15769 break;
15770 default:
15771 break;
15773 break;
15774 case tcc_reference:
15775 if (TREE_CODE (expr) == BIT_FIELD_REF
15776 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15777 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15778 lhs_addr, lhs_var, target_expr, true,
15779 depth);
15780 break;
15781 case tcc_vl_exp:
15782 if (TREE_CODE (expr) == CALL_EXPR)
15784 if (tree fndecl = get_callee_fndecl (expr))
15785 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15786 || fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
15788 int nargs = call_expr_nargs (expr);
15789 for (int i = 0; i < nargs; i++)
15790 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15791 pre_p, lhs_addr, lhs_var,
15792 target_expr, true, depth);
15795 break;
15796 default:
15797 break;
15800 finish:
15801 if (saw_lhs == 0 && pre_p)
15803 enum gimplify_status gs;
15804 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
15806 gimplify_stmt (&expr, pre_p);
15807 return saw_lhs;
15809 else if (rhs)
15810 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
15811 else
15812 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
15813 if (gs != GS_ALL_DONE)
15814 saw_lhs = -1;
15817 return saw_lhs;
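/* E.g. for an atomic update 'x = x + foo (y)', the appearance of 'x'
   in the rhs is replaced by LHS_VAR (the value loaded from *LHS_ADDR),
   while the lhs-free subexpression 'foo (y)' is evaluated into a
   temporary in PRE_P, since saw_lhs stays 0 for it.  */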
15820 /* Gimplify an OMP_ATOMIC statement. */
15822 static enum gimplify_status
15823 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
15825 tree addr = TREE_OPERAND (*expr_p, 0);
15826 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
15827 ? NULL : TREE_OPERAND (*expr_p, 1);
15828 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
15829 tree tmp_load;
15830 gomp_atomic_load *loadstmt;
15831 gomp_atomic_store *storestmt;
15832 tree target_expr = NULL_TREE;
15834 tmp_load = create_tmp_reg (type);
15835 if (rhs
15836 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
15837 true, 0) < 0)
15838 return GS_ERROR;
15840 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
15841 != GS_ALL_DONE)
15842 return GS_ERROR;
15844 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
15845 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15846 gimplify_seq_add_stmt (pre_p, loadstmt);
15847 if (rhs)
15849 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15850 representatives. Use BIT_FIELD_REF on the lhs instead. */
15851 tree rhsarg = rhs;
15852 if (TREE_CODE (rhs) == COND_EXPR)
15853 rhsarg = TREE_OPERAND (rhs, 1);
15854 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
15855 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
15857 tree bitpos = TREE_OPERAND (rhsarg, 2);
15858 tree op1 = TREE_OPERAND (rhsarg, 1);
15859 tree bitsize;
15860 tree tmp_store = tmp_load;
15861 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
15862 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
15863 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
15864 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
15865 else
15866 bitsize = TYPE_SIZE (TREE_TYPE (op1));
15867 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
15868 tree t = build2_loc (EXPR_LOCATION (rhsarg),
15869 MODIFY_EXPR, void_type_node,
15870 build3_loc (EXPR_LOCATION (rhsarg),
15871 BIT_FIELD_REF, TREE_TYPE (op1),
15872 tmp_store, bitsize, bitpos), op1);
15873 if (TREE_CODE (rhs) == COND_EXPR)
15874 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
15875 TREE_OPERAND (rhs, 0), t, void_node);
15876 gimplify_and_add (t, pre_p);
15877 rhs = tmp_store;
15879 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
15880 if (TREE_CODE (rhs) == COND_EXPR)
15881 gimplify_ctxp->allow_rhs_cond_expr = true;
15882 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
15883 is_gimple_val, fb_rvalue);
15884 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
15885 if (gs != GS_ALL_DONE)
15886 return GS_ERROR;
15889 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
15890 rhs = tmp_load;
15891 storestmt
15892 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15893 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
15895 gimple_omp_atomic_set_weak (loadstmt);
15896 gimple_omp_atomic_set_weak (storestmt);
15898 gimplify_seq_add_stmt (pre_p, storestmt);
15899 switch (TREE_CODE (*expr_p))
15901 case OMP_ATOMIC_READ:
15902 case OMP_ATOMIC_CAPTURE_OLD:
15903 *expr_p = tmp_load;
15904 gimple_omp_atomic_set_need_value (loadstmt);
15905 break;
15906 case OMP_ATOMIC_CAPTURE_NEW:
15907 *expr_p = rhs;
15908 gimple_omp_atomic_set_need_value (storestmt);
15909 break;
15910 default:
15911 *expr_p = NULL;
15912 break;
15915 return GS_ALL_DONE;
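/* A sketch of the expansion (GIMPLE shown approximately):

     #pragma omp atomic capture
     v = x += expr;

   becomes

     tmp_load = GIMPLE_OMP_ATOMIC_LOAD <&x>
     rhs = tmp_load + expr
     GIMPLE_OMP_ATOMIC_STORE <rhs>

   with need_value set on the store and *expr_p = rhs, so that 'v'
   receives the value stored (OMP_ATOMIC_CAPTURE_NEW).  */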
15918 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
15919 body, and adding some EH bits. */
15921 static enum gimplify_status
15922 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
15924 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
15925 gimple *body_stmt;
15926 gtransaction *trans_stmt;
15927 gimple_seq body = NULL;
15928 int subcode = 0;
15930 /* Wrap the transaction body in a BIND_EXPR so we have a context
15931 in which to put decls for OMP. */
15932 if (TREE_CODE (tbody) != BIND_EXPR)
15934 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
15935 TREE_SIDE_EFFECTS (bind) = 1;
15936 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
15937 TRANSACTION_EXPR_BODY (expr) = bind;
15940 push_gimplify_context ();
15941 temp = voidify_wrapper_expr (*expr_p, NULL);
15943 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
15944 pop_gimplify_context (body_stmt);
15946 trans_stmt = gimple_build_transaction (body);
15947 if (TRANSACTION_EXPR_OUTER (expr))
15948 subcode = GTMA_IS_OUTER;
15949 else if (TRANSACTION_EXPR_RELAXED (expr))
15950 subcode = GTMA_IS_RELAXED;
15951 gimple_transaction_set_subcode (trans_stmt, subcode);
15953 gimplify_seq_add_stmt (pre_p, trans_stmt);
15955 if (temp)
15957 *expr_p = temp;
15958 return GS_OK;
15961 *expr_p = NULL_TREE;
15962 return GS_ALL_DONE;
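/* E.g. '__transaction_relaxed { body }' becomes a GIMPLE_TRANSACTION
   tuple with subcode GTMA_IS_RELAXED wrapping the gimplified body.  */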
15965 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
15966 is the OMP_BODY of the original EXPR (which has already been
15967 gimplified, so it's not present in the EXPR).
15969 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
15971 static gimple *
15972 gimplify_omp_ordered (tree expr, gimple_seq body)
15974 tree c, decls;
15975 int failures = 0;
15976 unsigned int i;
15977 tree source_c = NULL_TREE;
15978 tree sink_c = NULL_TREE;
15980 if (gimplify_omp_ctxp)
15982 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15983 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
15984 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
15986 error_at (OMP_CLAUSE_LOCATION (c),
15987 "%<ordered%> construct with %qs clause must be "
15988 "closely nested inside a loop with %<ordered%> clause",
15989 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
15990 failures++;
15992 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
15993 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
15995 bool fail = false;
15996 sink_c = c;
15997 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
15998 continue; /* omp_cur_iteration - 1 */
15999 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16000 decls && TREE_CODE (decls) == TREE_LIST;
16001 decls = TREE_CHAIN (decls), ++i)
16002 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16003 continue;
16004 else if (TREE_VALUE (decls)
16005 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16007 error_at (OMP_CLAUSE_LOCATION (c),
16008 "variable %qE is not an iteration "
16009 "of outermost loop %d, expected %qE",
16010 TREE_VALUE (decls), i + 1,
16011 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16012 fail = true;
16013 failures++;
16015 else
16016 TREE_VALUE (decls)
16017 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16018 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16020 error_at (OMP_CLAUSE_LOCATION (c),
16021 "number of variables in %qs clause with "
16022 "%<sink%> modifier does not match number of "
16023 "iteration variables",
16024 OMP_CLAUSE_DOACROSS_DEPEND (c)
16025 ? "depend" : "doacross");
16026 failures++;
16029 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16030 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16032 if (source_c)
16034 error_at (OMP_CLAUSE_LOCATION (c),
16035 "more than one %qs clause with %<source%> "
16036 "modifier on an %<ordered%> construct",
16037 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16038 ? "depend" : "doacross");
16039 failures++;
16041 else
16042 source_c = c;
16045 if (source_c && sink_c)
16047 error_at (OMP_CLAUSE_LOCATION (source_c),
16048 "%qs clause with %<source%> modifier specified "
16049 "together with %qs clauses with %<sink%> modifier "
16050 "on the same construct",
16051 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16052 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16053 failures++;
16056 if (failures)
16057 return gimple_build_nop ();
16058 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
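/* E.g. inside a doubly nested loop with ordered(2) and iteration
   variables i and j,

     #pragma omp ordered depend(sink: i-1, j)

   passes the checks above (two variables, each matching the
   corresponding loop_iter_var entry), whereas naming another variable
   or the wrong number of variables is diagnosed and a GIMPLE_NOP is
   returned instead.  */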
16061 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16062 expression produces a value to be used as an operand inside a GIMPLE
16063 statement, the value will be stored back in *EXPR_P. This value will
16064 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16065 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16066 emitted in PRE_P and POST_P.
16068 Additionally, this process may overwrite parts of the input
16069 expression during gimplification. Ideally, it should be
16070 possible to do non-destructive gimplification.
16072 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16073 the expression needs to evaluate to a value to be used as
16074 an operand in a GIMPLE statement, this value will be stored in
16075 *EXPR_P on exit. This happens when the caller specifies one
16076 of fb_lvalue or fb_rvalue fallback flags.
16078 PRE_P will contain the sequence of GIMPLE statements corresponding
16079 to the evaluation of EXPR and all the side-effects that must
16080 be executed before the main expression. On exit, the last
16081 statement of PRE_P is the core statement being gimplified. For
16082 instance, when gimplifying 'if (++a)' the last statement in
16083 PRE_P will be 'if (t.1)' where t.1 is the result of
16084 pre-incrementing 'a'.
16086 POST_P will contain the sequence of GIMPLE statements corresponding
16087 to the evaluation of all the side-effects that must be executed
16088 after the main expression. If this is NULL, the post
16089 side-effects are stored at the end of PRE_P.
16091 The reason why the output is split in two is to handle post
16092 side-effects explicitly. In some cases, an expression may have
16093 inner and outer post side-effects which need to be emitted in
16094 an order different from the one given by the recursive
16095 traversal. For instance, for the expression (*p--)++ the post
16096 side-effects of '--' must actually occur *after* the post
16097 side-effects of '++'. However, gimplification will first visit
16098 the inner expression, so if a separate POST sequence was not
16099 used, the resulting sequence would be:
16101 1 t.1 = *p
16102 2 p = p - 1
16103 3 t.2 = t.1 + 1
16104 4 *p = t.2
16106 However, the post-decrement operation in line #2 must not be
16107 evaluated until after the store to *p at line #4, so the
16108 correct sequence should be:
16110 1 t.1 = *p
16111 2 t.2 = t.1 + 1
16112 3 *p = t.2
16113 4 p = p - 1
16115 So, by specifying a separate post queue, it is possible
16116 to emit the post side-effects in the correct order.
16117 If POST_P is NULL, an internal queue will be used. Before
16118 returning to the caller, the sequence POST_P is appended to
16119 the main output sequence PRE_P.
16121 GIMPLE_TEST_F points to a function that takes a tree T and
16122 returns nonzero if T is in the GIMPLE form requested by the
16123 caller. The GIMPLE predicates are in gimple.cc.
16125 FALLBACK tells the function what sort of a temporary we want if
16126 gimplification cannot produce an expression that complies with
16127 GIMPLE_TEST_F.
16129 fb_none means that no temporary should be generated
16130 fb_rvalue means that an rvalue is OK to generate
16131 fb_lvalue means that an lvalue is OK to generate
16132 fb_either means that either is OK, but an lvalue is preferable.
16133 fb_mayfail means that gimplification may fail (in which case
16134 GS_ERROR will be returned)
16136 The return value is either GS_ERROR or GS_ALL_DONE, since this
16137 function iterates until EXPR is completely gimplified or an error
16138 occurs. */
16140 enum gimplify_status
16141 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16142 bool (*gimple_test_f) (tree), fallback_t fallback)
16144 tree tmp;
16145 gimple_seq internal_pre = NULL;
16146 gimple_seq internal_post = NULL;
16147 tree save_expr;
16148 bool is_statement;
16149 location_t saved_location;
16150 enum gimplify_status ret;
16151 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16152 tree label;
16154 save_expr = *expr_p;
16155 if (save_expr == NULL_TREE)
16156 return GS_ALL_DONE;
16158 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16159 is_statement = gimple_test_f == is_gimple_stmt;
16160 if (is_statement)
16161 gcc_assert (pre_p);
16163 /* Consistency checks. */
16164 if (gimple_test_f == is_gimple_reg)
16165 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16166 else if (gimple_test_f == is_gimple_val
16167 || gimple_test_f == is_gimple_call_addr
16168 || gimple_test_f == is_gimple_condexpr_for_cond
16169 || gimple_test_f == is_gimple_mem_rhs
16170 || gimple_test_f == is_gimple_mem_rhs_or_call
16171 || gimple_test_f == is_gimple_reg_rhs
16172 || gimple_test_f == is_gimple_reg_rhs_or_call
16173 || gimple_test_f == is_gimple_asm_val
16174 || gimple_test_f == is_gimple_mem_ref_addr)
16175 gcc_assert (fallback & fb_rvalue);
16176 else if (gimple_test_f == is_gimple_min_lval
16177 || gimple_test_f == is_gimple_lvalue)
16178 gcc_assert (fallback & fb_lvalue);
16179 else if (gimple_test_f == is_gimple_addressable)
16180 gcc_assert (fallback & fb_either);
16181 else if (gimple_test_f == is_gimple_stmt)
16182 gcc_assert (fallback == fb_none);
16183 else
16185 /* We should have recognized the GIMPLE_TEST_F predicate to
16186 know what kind of fallback to use in case a temporary is
16187 needed to hold the value or address of *EXPR_P. */
16188 gcc_unreachable ();
16191 /* We used to check the predicate here and return immediately if it
16192 succeeds. This is wrong; the design is for gimplification to be
16193 idempotent, and for the predicates to only test for valid forms, not
16194 whether they are fully simplified. */
16195 if (pre_p == NULL)
16196 pre_p = &internal_pre;
16198 if (post_p == NULL)
16199 post_p = &internal_post;
16201 /* Remember the last statements added to PRE_P and POST_P. Every
16202 new statement added by the gimplification helpers needs to be
16203 annotated with location information. To centralize the
16204 responsibility, we remember the last statement that had been
16205 added to both queues before gimplifying *EXPR_P. If
16206 gimplification produces new statements in PRE_P and POST_P, those
16207 statements will be annotated with the same location information
16208 as *EXPR_P. */
16209 pre_last_gsi = gsi_last (*pre_p);
16210 post_last_gsi = gsi_last (*post_p);
16212 saved_location = input_location;
16213 if (save_expr != error_mark_node
16214 && EXPR_HAS_LOCATION (*expr_p))
16215 input_location = EXPR_LOCATION (*expr_p);
16217 /* Loop over the specific gimplifiers until the toplevel node
16218 remains the same. */
16221 /* Strip away as many useless type conversions as possible
16222 at the toplevel. */
16223 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16225 /* Remember the expr. */
16226 save_expr = *expr_p;
16228 /* Die, die, die, my darling. */
16229 if (error_operand_p (save_expr))
16231 ret = GS_ERROR;
16232 break;
16235 /* Do any language-specific gimplification. */
16236 ret = ((enum gimplify_status)
16237 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16238 if (ret == GS_OK)
16240 if (*expr_p == NULL_TREE)
16241 break;
16242 if (*expr_p != save_expr)
16243 continue;
16245 else if (ret != GS_UNHANDLED)
16246 break;
16248 /* Make sure that all the cases set 'ret' appropriately. */
16249 ret = GS_UNHANDLED;
16250 switch (TREE_CODE (*expr_p))
16252 /* First deal with the special cases. */
16254 case POSTINCREMENT_EXPR:
16255 case POSTDECREMENT_EXPR:
16256 case PREINCREMENT_EXPR:
16257 case PREDECREMENT_EXPR:
16258 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16259 fallback != fb_none,
16260 TREE_TYPE (*expr_p));
16261 break;
16263 case VIEW_CONVERT_EXPR:
16264 if ((fallback & fb_rvalue)
16265 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16266 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16268 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16269 post_p, is_gimple_val, fb_rvalue);
16270 recalculate_side_effects (*expr_p);
16271 break;
16273 /* Fallthru. */
16275 case ARRAY_REF:
16276 case ARRAY_RANGE_REF:
16277 case REALPART_EXPR:
16278 case IMAGPART_EXPR:
16279 case COMPONENT_REF:
16280 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16281 fallback ? fallback : fb_rvalue);
16282 break;
16284 case COND_EXPR:
16285 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16287 /* C99 code may assign to an array in a structure value of a
16288 conditional expression, and this has undefined behavior
16289 only on execution, so create a temporary if an lvalue is
16290 required. */
16291 if (fallback == fb_lvalue)
16293 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16294 mark_addressable (*expr_p);
16295 ret = GS_OK;
16297 break;
16299 case CALL_EXPR:
16300 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16302 /* C99 code may assign to an array in a structure returned
16303 from a function, and this has undefined behavior only on
16304 execution, so create a temporary if an lvalue is
16305 required. */
16306 if (fallback == fb_lvalue)
16308 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16309 mark_addressable (*expr_p);
16310 ret = GS_OK;
16312 break;
16314 case TREE_LIST:
16315 gcc_unreachable ();
16317 case COMPOUND_EXPR:
16318 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16319 break;
16321 case COMPOUND_LITERAL_EXPR:
16322 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16323 gimple_test_f, fallback);
16324 break;
16326 case MODIFY_EXPR:
16327 case INIT_EXPR:
16328 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16329 fallback != fb_none);
16330 break;
16332 case TRUTH_ANDIF_EXPR:
16333 case TRUTH_ORIF_EXPR:
16335 /* Preserve the original type of the expression and the
16336 source location of the outer expression. */
16337 tree org_type = TREE_TYPE (*expr_p);
16338 *expr_p = gimple_boolify (*expr_p);
16339 *expr_p = build3_loc (input_location, COND_EXPR,
16340 org_type, *expr_p,
16341 fold_convert_loc
16342 (input_location,
16343 org_type, boolean_true_node),
16344 fold_convert_loc
16345 (input_location,
16346 org_type, boolean_false_node));
16347 ret = GS_OK;
16348 break;
16351 case TRUTH_NOT_EXPR:
16353 tree type = TREE_TYPE (*expr_p);
16354 /* The parsers are careful to generate TRUTH_NOT_EXPR
16355 only with operands that are always zero or one.
16356 We do not fold here but handle the only interesting case
16357 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16358 *expr_p = gimple_boolify (*expr_p);
16359 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16360 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16361 TREE_TYPE (*expr_p),
16362 TREE_OPERAND (*expr_p, 0));
16363 else
16364 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16365 TREE_TYPE (*expr_p),
16366 TREE_OPERAND (*expr_p, 0),
16367 build_int_cst (TREE_TYPE (*expr_p), 1));
16368 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16369 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16370 ret = GS_OK;
16371 break;
16374 case ADDR_EXPR:
16375 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16376 break;
16378 case ANNOTATE_EXPR:
16380 tree cond = TREE_OPERAND (*expr_p, 0);
16381 tree kind = TREE_OPERAND (*expr_p, 1);
16382 tree data = TREE_OPERAND (*expr_p, 2);
16383 tree type = TREE_TYPE (cond);
16384 if (!INTEGRAL_TYPE_P (type))
16386 *expr_p = cond;
16387 ret = GS_OK;
16388 break;
16390 tree tmp = create_tmp_var (type);
16391 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16392 gcall *call
16393 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16394 gimple_call_set_lhs (call, tmp);
16395 gimplify_seq_add_stmt (pre_p, call);
16396 *expr_p = tmp;
16397 ret = GS_ALL_DONE;
16398 break;
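/* Sketch of the result: for an integral condition this emits an
   internal-function call into the pre-queue, shown in dumps roughly
   as

     tmp = .ANNOTATE (cond, kind, data);

   so loop annotations (for instance from "#pragma GCC ivdep") survive
   into GIMPLE as a plain call rather than a special tree node.  */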
16401 case VA_ARG_EXPR:
16402 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16403 break;
16405 CASE_CONVERT:
16406 if (IS_EMPTY_STMT (*expr_p))
16408 ret = GS_ALL_DONE;
16409 break;
16412 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16413 || fallback == fb_none)
16415 /* Just strip a conversion to void (or in void context) and
16416 try again. */
16417 *expr_p = TREE_OPERAND (*expr_p, 0);
16418 ret = GS_OK;
16419 break;
16422 ret = gimplify_conversion (expr_p);
16423 if (ret == GS_ERROR)
16424 break;
16425 if (*expr_p != save_expr)
16426 break;
16427 /* FALLTHRU */
16429 case FIX_TRUNC_EXPR:
16430 /* unary_expr: ... | '(' cast ')' val | ... */
16431 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16432 is_gimple_val, fb_rvalue);
16433 recalculate_side_effects (*expr_p);
16434 break;
16436 case INDIRECT_REF:
16438 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16439 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16440 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16442 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16443 if (*expr_p != save_expr)
16445 ret = GS_OK;
16446 break;
16449 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16450 is_gimple_reg, fb_rvalue);
16451 if (ret == GS_ERROR)
16452 break;
16454 recalculate_side_effects (*expr_p);
16455 *expr_p = fold_build2_loc (input_location, MEM_REF,
16456 TREE_TYPE (*expr_p),
16457 TREE_OPERAND (*expr_p, 0),
16458 build_int_cst (saved_ptr_type, 0));
16459 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16460 TREE_THIS_NOTRAP (*expr_p) = notrap;
16461 ret = GS_OK;
16462 break;
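/* Roughly, a plain dereference *p that survives folding is rewritten
   above into the canonical zero-offset memory reference, shown in
   dumps as something like

     MEM[(T *)p + 0B]

   with the original volatility and no-trap flags copied over.  */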
16465 /* We arrive here through the various re-gimplification paths. */
16466 case MEM_REF:
16467 /* First try re-folding the whole thing. */
16468 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16469 TREE_OPERAND (*expr_p, 0),
16470 TREE_OPERAND (*expr_p, 1));
16471 if (tmp)
16473 REF_REVERSE_STORAGE_ORDER (tmp)
16474 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16475 *expr_p = tmp;
16476 recalculate_side_effects (*expr_p);
16477 ret = GS_OK;
16478 break;
16480 /* Avoid re-gimplifying the address operand if it is already
16481 in suitable form. Re-gimplifying would mark the address
16482 operand addressable. Always gimplify when not in SSA form
16483 as we may still have to gimplify decls with value-exprs. */
16484 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16485 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16487 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16488 is_gimple_mem_ref_addr, fb_rvalue);
16489 if (ret == GS_ERROR)
16490 break;
16492 recalculate_side_effects (*expr_p);
16493 ret = GS_ALL_DONE;
16494 break;
16496 /* Constants need not be gimplified. */
16497 case INTEGER_CST:
16498 case REAL_CST:
16499 case FIXED_CST:
16500 case STRING_CST:
16501 case COMPLEX_CST:
16502 case VECTOR_CST:
16503 /* Drop the overflow flag on constants, we do not want
16504 that in the GIMPLE IL. */
16505 if (TREE_OVERFLOW_P (*expr_p))
16506 *expr_p = drop_tree_overflow (*expr_p);
16507 ret = GS_ALL_DONE;
16508 break;
16510 case CONST_DECL:
16511 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16512 CONST_DECL node. Otherwise the decl is replaceable by its
16513 value. */
16514 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16515 if (fallback & fb_lvalue)
16516 ret = GS_ALL_DONE;
16517 else
16519 *expr_p = DECL_INITIAL (*expr_p);
16520 ret = GS_OK;
16522 break;
16524 case DECL_EXPR:
16525 ret = gimplify_decl_expr (expr_p, pre_p);
16526 break;
16528 case BIND_EXPR:
16529 ret = gimplify_bind_expr (expr_p, pre_p);
16530 break;
16532 case LOOP_EXPR:
16533 ret = gimplify_loop_expr (expr_p, pre_p);
16534 break;
16536 case SWITCH_EXPR:
16537 ret = gimplify_switch_expr (expr_p, pre_p);
16538 break;
16540 case EXIT_EXPR:
16541 ret = gimplify_exit_expr (expr_p);
16542 break;
16544 case GOTO_EXPR:
16545 /* If the target is not a LABEL_DECL, then it is a computed jump
16546 and the target needs to be gimplified. */
16547 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16549 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16550 NULL, is_gimple_val, fb_rvalue);
16551 if (ret == GS_ERROR)
16552 break;
16554 gimplify_seq_add_stmt (pre_p,
16555 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16556 ret = GS_ALL_DONE;
16557 break;
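/* E.g. a computed jump "goto *p;" first gimplifies p into a simple
   value, while a plain "goto lab;" emits the GIMPLE_GOTO directly;
   either way the jump itself lands in the pre-queue.  */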
16559 case PREDICT_EXPR:
16560 gimplify_seq_add_stmt (pre_p,
16561 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16562 PREDICT_EXPR_OUTCOME (*expr_p)));
16563 ret = GS_ALL_DONE;
16564 break;
16566 case LABEL_EXPR:
16567 ret = gimplify_label_expr (expr_p, pre_p);
16568 label = LABEL_EXPR_LABEL (*expr_p);
16569 gcc_assert (decl_function_context (label) == current_function_decl);
16571 /* If the label is used in a goto statement, or the address of the
16572 label is taken, we need to unpoison all variables that were seen
16573 so far. Doing so prevents us from reporting false positives. */
16574 if (asan_poisoned_variables
16575 && asan_used_labels != NULL
16576 && asan_used_labels->contains (label)
16577 && !gimplify_omp_ctxp)
16578 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16579 break;
16581 case CASE_LABEL_EXPR:
16582 ret = gimplify_case_label_expr (expr_p, pre_p);
16584 if (gimplify_ctxp->live_switch_vars)
16585 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16586 pre_p);
16587 break;
16589 case RETURN_EXPR:
16590 ret = gimplify_return_expr (*expr_p, pre_p);
16591 break;
16593 case CONSTRUCTOR:
16594 /* Don't reduce this in place; let gimplify_init_constructor work its
16595 magic. But if we're just elaborating this for side effects, gimplify
16596 only those elements that have side effects. */
16597 if (fallback == fb_none)
16599 unsigned HOST_WIDE_INT ix;
16600 tree val;
16601 tree temp = NULL_TREE;
16602 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16603 if (TREE_SIDE_EFFECTS (val))
16604 append_to_statement_list (val, &temp);
16606 *expr_p = temp;
16607 ret = temp ? GS_OK : GS_ALL_DONE;
16609 /* C99 code may assign to an array in a constructed
16610 structure or union, and this has undefined behavior only
16611 on execution, so create a temporary if an lvalue is
16612 required. */
16613 else if (fallback == fb_lvalue)
16615 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16616 mark_addressable (*expr_p);
16617 ret = GS_OK;
16619 else
16620 ret = GS_ALL_DONE;
16621 break;
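/* Illustration: elaborated purely for side effects (fallback ==
   fb_none), a statement like

     (void) (struct S){ .a = f (), .b = 0 };

   keeps only the call to f () from the initializer; with fb_lvalue
   the whole constructor is instead forced into an addressable
   temporary.  */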
16623 /* The following are special cases that are not handled by the
16624 original GIMPLE grammar. */
16626 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16627 eliminated. */
16628 case SAVE_EXPR:
16629 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16630 break;
16632 case BIT_FIELD_REF:
16633 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16634 post_p, is_gimple_lvalue, fb_either);
16635 recalculate_side_effects (*expr_p);
16636 break;
16638 case TARGET_MEM_REF:
16640 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16642 if (TMR_BASE (*expr_p))
16643 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16644 post_p, is_gimple_mem_ref_addr, fb_either);
16645 if (TMR_INDEX (*expr_p))
16646 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16647 post_p, is_gimple_val, fb_rvalue);
16648 if (TMR_INDEX2 (*expr_p))
16649 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16650 post_p, is_gimple_val, fb_rvalue);
16651 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16652 ret = MIN (r0, r1);
16654 break;
16656 case NON_LVALUE_EXPR:
16657 /* This should have been stripped above. */
16658 gcc_unreachable ();
16660 case ASM_EXPR:
16661 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16662 break;
16664 case TRY_FINALLY_EXPR:
16665 case TRY_CATCH_EXPR:
16667 gimple_seq eval, cleanup;
16668 gtry *try_;
16670 /* Calls to destructors are generated automatically in the FINALLY/CATCH
16671 block. They should have UNKNOWN_LOCATION as their location. However,
16672 gimplify_call_expr resets such call stmts to input_location if it
16673 finds a stmt's location is unknown. To prevent that resetting for
16674 destructors, we set input_location to UNKNOWN_LOCATION here.
16675 Note that this only affects the destructor calls in the FINALLY/CATCH
16676 block; input_location is automatically restored to its original
16677 value by the end of gimplify_expr. */
16678 input_location = UNKNOWN_LOCATION;
16679 eval = cleanup = NULL;
16680 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16681 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16682 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16684 gimple_seq n = NULL, e = NULL;
16685 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16686 0), &n);
16687 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16688 1), &e);
16689 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16691 geh_else *stmt = gimple_build_eh_else (n, e);
16692 gimple_seq_add_stmt (&cleanup, stmt);
16695 else
16696 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16697 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16698 if (gimple_seq_empty_p (cleanup))
16700 gimple_seq_add_seq (pre_p, eval);
16701 ret = GS_ALL_DONE;
16702 break;
16704 try_ = gimple_build_try (eval, cleanup,
16705 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16706 ? GIMPLE_TRY_FINALLY
16707 : GIMPLE_TRY_CATCH);
16708 if (EXPR_HAS_LOCATION (save_expr))
16709 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16710 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16711 gimple_set_location (try_, saved_location);
16712 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16713 gimple_try_set_catch_is_cleanup (try_,
16714 TRY_CATCH_IS_CLEANUP (*expr_p));
16715 gimplify_seq_add_stmt (pre_p, try_);
16716 ret = GS_ALL_DONE;
16717 break;
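/* Net effect, sketched: for "try { body } finally { cleanup }" we
   emit a GIMPLE_TRY of kind GIMPLE_TRY_FINALLY wrapping the two
   gimplified sequences, and an EH_ELSE_EXPR cleanup becomes a
   GEH_ELSE selecting between the normal-exit and the exceptional
   cleanup path.  */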
16720 case CLEANUP_POINT_EXPR:
16721 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16722 break;
16724 case TARGET_EXPR:
16725 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16726 break;
16728 case CATCH_EXPR:
16730 gimple *c;
16731 gimple_seq handler = NULL;
16732 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16733 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16734 gimplify_seq_add_stmt (pre_p, c);
16735 ret = GS_ALL_DONE;
16736 break;
16739 case EH_FILTER_EXPR:
16741 gimple *ehf;
16742 gimple_seq failure = NULL;
16744 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16745 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16746 copy_warning (ehf, *expr_p);
16747 gimplify_seq_add_stmt (pre_p, ehf);
16748 ret = GS_ALL_DONE;
16749 break;
16752 case OBJ_TYPE_REF:
16754 enum gimplify_status r0, r1;
16755 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16756 post_p, is_gimple_val, fb_rvalue);
16757 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16758 post_p, is_gimple_val, fb_rvalue);
16759 TREE_SIDE_EFFECTS (*expr_p) = 0;
16760 ret = MIN (r0, r1);
16762 break;
16764 case LABEL_DECL:
16765 /* We get here when taking the address of a label. We mark
16766 the label as "forced", meaning it can never be removed and
16767 is a potential target for any computed goto. */
16768 FORCED_LABEL (*expr_p) = 1;
16769 ret = GS_ALL_DONE;
16770 break;
16772 case STATEMENT_LIST:
16773 ret = gimplify_statement_list (expr_p, pre_p);
16774 break;
16776 case WITH_SIZE_EXPR:
16778 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16779 post_p == &internal_post ? NULL : post_p,
16780 gimple_test_f, fallback);
16781 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16782 is_gimple_val, fb_rvalue);
16783 ret = GS_ALL_DONE;
16785 break;
16787 case VAR_DECL:
16788 case PARM_DECL:
16789 ret = gimplify_var_or_parm_decl (expr_p);
16790 break;
16792 case RESULT_DECL:
16793 /* When within an OMP context, notice uses of variables. */
16794 if (gimplify_omp_ctxp)
16795 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16796 ret = GS_ALL_DONE;
16797 break;
16799 case DEBUG_EXPR_DECL:
16800 gcc_unreachable ();
16802 case DEBUG_BEGIN_STMT:
16803 gimplify_seq_add_stmt (pre_p,
16804 gimple_build_debug_begin_stmt
16805 (TREE_BLOCK (*expr_p),
16806 EXPR_LOCATION (*expr_p)));
16807 ret = GS_ALL_DONE;
16808 *expr_p = NULL;
16809 break;
16811 case SSA_NAME:
16812 /* Allow callbacks into the gimplifier during optimization. */
16813 ret = GS_ALL_DONE;
16814 break;
16816 case OMP_PARALLEL:
16817 gimplify_omp_parallel (expr_p, pre_p);
16818 ret = GS_ALL_DONE;
16819 break;
16821 case OMP_TASK:
16822 gimplify_omp_task (expr_p, pre_p);
16823 ret = GS_ALL_DONE;
16824 break;
16826 case OMP_SIMD:
16828 /* Temporarily disable into_ssa, as scan_omp_simd
16829 (which calls copy_gimple_seq_and_replace_locals) can't properly
16830 deal with SSA_NAMEs defined outside of the body. */
16831 bool saved_into_ssa = gimplify_ctxp->into_ssa;
16832 gimplify_ctxp->into_ssa = false;
16833 ret = gimplify_omp_for (expr_p, pre_p);
16834 gimplify_ctxp->into_ssa = saved_into_ssa;
16835 break;
16838 case OMP_FOR:
16839 case OMP_DISTRIBUTE:
16840 case OMP_TASKLOOP:
16841 case OACC_LOOP:
16842 ret = gimplify_omp_for (expr_p, pre_p);
16843 break;
16845 case OMP_LOOP:
16846 ret = gimplify_omp_loop (expr_p, pre_p);
16847 break;
16849 case OACC_CACHE:
16850 gimplify_oacc_cache (expr_p, pre_p);
16851 ret = GS_ALL_DONE;
16852 break;
16854 case OACC_DECLARE:
16855 gimplify_oacc_declare (expr_p, pre_p);
16856 ret = GS_ALL_DONE;
16857 break;
16859 case OACC_HOST_DATA:
16860 case OACC_DATA:
16861 case OACC_KERNELS:
16862 case OACC_PARALLEL:
16863 case OACC_SERIAL:
16864 case OMP_SCOPE:
16865 case OMP_SECTIONS:
16866 case OMP_SINGLE:
16867 case OMP_TARGET:
16868 case OMP_TARGET_DATA:
16869 case OMP_TEAMS:
16870 gimplify_omp_workshare (expr_p, pre_p);
16871 ret = GS_ALL_DONE;
16872 break;
16874 case OACC_ENTER_DATA:
16875 case OACC_EXIT_DATA:
16876 case OACC_UPDATE:
16877 case OMP_TARGET_UPDATE:
16878 case OMP_TARGET_ENTER_DATA:
16879 case OMP_TARGET_EXIT_DATA:
16880 gimplify_omp_target_update (expr_p, pre_p);
16881 ret = GS_ALL_DONE;
16882 break;
16884 case OMP_SECTION:
16885 case OMP_MASTER:
16886 case OMP_MASKED:
16887 case OMP_ORDERED:
16888 case OMP_CRITICAL:
16889 case OMP_SCAN:
16891 gimple_seq body = NULL;
16892 gimple *g;
16893 bool saved_in_omp_construct = in_omp_construct;
16895 in_omp_construct = true;
16896 gimplify_and_add (OMP_BODY (*expr_p), &body);
16897 in_omp_construct = saved_in_omp_construct;
16898 switch (TREE_CODE (*expr_p))
16900 case OMP_SECTION:
16901 g = gimple_build_omp_section (body);
16902 break;
16903 case OMP_MASTER:
16904 g = gimple_build_omp_master (body);
16905 break;
16906 case OMP_ORDERED:
16907 g = gimplify_omp_ordered (*expr_p, body);
16908 if (OMP_BODY (*expr_p) == NULL_TREE
16909 && gimple_code (g) == GIMPLE_OMP_ORDERED)
16910 gimple_omp_ordered_standalone (g);
16911 break;
16912 case OMP_MASKED:
16913 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
16914 pre_p, ORT_WORKSHARE, OMP_MASKED);
16915 gimplify_adjust_omp_clauses (pre_p, body,
16916 &OMP_MASKED_CLAUSES (*expr_p),
16917 OMP_MASKED);
16918 g = gimple_build_omp_masked (body,
16919 OMP_MASKED_CLAUSES (*expr_p));
16920 break;
16921 case OMP_CRITICAL:
16922 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
16923 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
16924 gimplify_adjust_omp_clauses (pre_p, body,
16925 &OMP_CRITICAL_CLAUSES (*expr_p),
16926 OMP_CRITICAL);
16927 g = gimple_build_omp_critical (body,
16928 OMP_CRITICAL_NAME (*expr_p),
16929 OMP_CRITICAL_CLAUSES (*expr_p));
16930 break;
16931 case OMP_SCAN:
16932 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
16933 pre_p, ORT_WORKSHARE, OMP_SCAN);
16934 gimplify_adjust_omp_clauses (pre_p, body,
16935 &OMP_SCAN_CLAUSES (*expr_p),
16936 OMP_SCAN);
16937 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
16938 break;
16939 default:
16940 gcc_unreachable ();
16942 gimplify_seq_add_stmt (pre_p, g);
16943 ret = GS_ALL_DONE;
16944 break;
16947 case OMP_TASKGROUP:
16949 gimple_seq body = NULL;
16951 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
16952 bool saved_in_omp_construct = in_omp_construct;
16953 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
16954 OMP_TASKGROUP);
16955 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
16957 in_omp_construct = true;
16958 gimplify_and_add (OMP_BODY (*expr_p), &body);
16959 in_omp_construct = saved_in_omp_construct;
16960 gimple_seq cleanup = NULL;
16961 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
16962 gimple *g = gimple_build_call (fn, 0);
16963 gimple_seq_add_stmt (&cleanup, g);
16964 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
16965 body = NULL;
16966 gimple_seq_add_stmt (&body, g);
16967 g = gimple_build_omp_taskgroup (body, *pclauses);
16968 gimplify_seq_add_stmt (pre_p, g);
16969 ret = GS_ALL_DONE;
16970 break;
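/* Resulting shape, roughly:

     GIMPLE_OMP_TASKGROUP
       try { <gimplified body> } finally { GOMP_taskgroup_end (); }

   so the runtime's end-of-taskgroup hook runs even on abnormal exit
   from the body.  */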
16973 case OMP_ATOMIC:
16974 case OMP_ATOMIC_READ:
16975 case OMP_ATOMIC_CAPTURE_OLD:
16976 case OMP_ATOMIC_CAPTURE_NEW:
16977 ret = gimplify_omp_atomic (expr_p, pre_p);
16978 break;
16980 case TRANSACTION_EXPR:
16981 ret = gimplify_transaction (expr_p, pre_p);
16982 break;
16984 case TRUTH_AND_EXPR:
16985 case TRUTH_OR_EXPR:
16986 case TRUTH_XOR_EXPR:
16988 tree orig_type = TREE_TYPE (*expr_p);
16989 tree new_type, xop0, xop1;
16990 *expr_p = gimple_boolify (*expr_p);
16991 new_type = TREE_TYPE (*expr_p);
16992 if (!useless_type_conversion_p (orig_type, new_type))
16994 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
16995 ret = GS_OK;
16996 break;
16999 /* Boolified binary truth expressions are semantically equivalent
17000 to bitwise binary expressions. Canonicalize them to the
17001 bitwise variant. */
17002 switch (TREE_CODE (*expr_p))
17004 case TRUTH_AND_EXPR:
17005 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17006 break;
17007 case TRUTH_OR_EXPR:
17008 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17009 break;
17010 case TRUTH_XOR_EXPR:
17011 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17012 break;
17013 default:
17014 break;
17016 /* Now make sure that operands have compatible type to
17017 expression's new_type. */
17018 xop0 = TREE_OPERAND (*expr_p, 0);
17019 xop1 = TREE_OPERAND (*expr_p, 1);
17020 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17021 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17022 new_type,
17023 xop0);
17024 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17025 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17026 new_type,
17027 xop1);
17028 /* Continue classified as tcc_binary. */
17029 goto expr_2;
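/* Sketch: after boolification and the operand conversions above, the
   non-short-circuit forms lower to single bitwise statements, e.g.

     tmp = a & b;    (TRUTH_AND_EXPR -> BIT_AND_EXPR)

   via the shared expr_2 path below.  */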
17032 case VEC_COND_EXPR:
17033 goto expr_3;
17035 case VEC_PERM_EXPR:
17036 /* Classified as tcc_expression. */
17037 goto expr_3;
17039 case BIT_INSERT_EXPR:
17040 /* Argument 3 is a constant. */
17041 goto expr_2;
17043 case POINTER_PLUS_EXPR:
17045 enum gimplify_status r0, r1;
17046 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17047 post_p, is_gimple_val, fb_rvalue);
17048 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17049 post_p, is_gimple_val, fb_rvalue);
17050 recalculate_side_effects (*expr_p);
17051 ret = MIN (r0, r1);
17052 break;
17055 default:
17056 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17058 case tcc_comparison:
17059 /* Handle comparison of objects of non-scalar-mode aggregates
17060 with a call to memcmp. It would be nice to only have to do
17061 this for variable-sized objects, but then we'd have to allow
17062 the same nest of reference nodes we allow for MODIFY_EXPR and
17063 that's too complex.
17065 Compare scalar mode aggregates as scalar mode values. Using
17066 memcmp for them would be very inefficient at best, and is
17067 plain wrong if bitfields are involved. */
17069 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17071 /* Vector comparisons need no boolification. */
17072 if (TREE_CODE (type) == VECTOR_TYPE)
17073 goto expr_2;
17074 else if (!AGGREGATE_TYPE_P (type))
17076 tree org_type = TREE_TYPE (*expr_p);
17077 *expr_p = gimple_boolify (*expr_p);
17078 if (!useless_type_conversion_p (org_type,
17079 TREE_TYPE (*expr_p)))
17081 *expr_p = fold_convert_loc (input_location,
17082 org_type, *expr_p);
17083 ret = GS_OK;
17085 else
17086 goto expr_2;
17088 else if (TYPE_MODE (type) != BLKmode)
17089 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17090 else
17091 ret = gimplify_variable_sized_compare (expr_p);
17093 break;
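/* For example, an equality test between two BLKmode aggregates (as
   some front ends emit) is lowered to roughly

     __builtin_memcmp (&a, &b, sizeof (a)) == 0

   whereas a small scalar-mode aggregate is compared directly as an
   integer of the same mode, which is cheaper and remains correct when
   bitfields are involved.  */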
17096 /* If *EXPR_P does not need to be special-cased, handle it
17097 according to its class. */
17098 case tcc_unary:
17099 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17100 post_p, is_gimple_val, fb_rvalue);
17101 break;
17103 case tcc_binary:
17104 expr_2:
17106 enum gimplify_status r0, r1;
17108 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17109 post_p, is_gimple_val, fb_rvalue);
17110 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17111 post_p, is_gimple_val, fb_rvalue);
17113 ret = MIN (r0, r1);
17114 break;
17117 expr_3:
17119 enum gimplify_status r0, r1, r2;
17121 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17122 post_p, is_gimple_val, fb_rvalue);
17123 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17124 post_p, is_gimple_val, fb_rvalue);
17125 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17126 post_p, is_gimple_val, fb_rvalue);
17128 ret = MIN (MIN (r0, r1), r2);
17129 break;
17132 case tcc_declaration:
17133 case tcc_constant:
17134 ret = GS_ALL_DONE;
17135 goto dont_recalculate;
17137 default:
17138 gcc_unreachable ();
17141 recalculate_side_effects (*expr_p);
17143 dont_recalculate:
17144 break;
17147 gcc_assert (*expr_p || ret != GS_OK);
17149 while (ret == GS_OK);
17151 /* If we encountered an error_mark somewhere nested inside, either
17152 stub out the statement or propagate the error back out. */
17153 if (ret == GS_ERROR)
17155 if (is_statement)
17156 *expr_p = NULL;
17157 goto out;
17160 /* This was only valid as a return value from the langhook, which
17161 we handled. Make sure it doesn't escape from any other context. */
17162 gcc_assert (ret != GS_UNHANDLED);
17164 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17166 /* We aren't looking for a value, and we don't have a valid
17167 statement. If it doesn't have side-effects, throw it away.
17168 We can also get here with code such as "*&&L;", where L is
17169 a LABEL_DECL that is marked as FORCED_LABEL. */
17170 if (TREE_CODE (*expr_p) == LABEL_DECL
17171 || !TREE_SIDE_EFFECTS (*expr_p))
17172 *expr_p = NULL;
17173 else if (!TREE_THIS_VOLATILE (*expr_p))
17175 /* This is probably a _REF that contains something nested that
17176 has side effects. Recurse through the operands to find it. */
17177 enum tree_code code = TREE_CODE (*expr_p);
17179 switch (code)
17181 case COMPONENT_REF:
17182 case REALPART_EXPR:
17183 case IMAGPART_EXPR:
17184 case VIEW_CONVERT_EXPR:
17185 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17186 gimple_test_f, fallback);
17187 break;
17189 case ARRAY_REF:
17190 case ARRAY_RANGE_REF:
17191 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17192 gimple_test_f, fallback);
17193 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17194 gimple_test_f, fallback);
17195 break;
17197 default:
17198 /* Anything else with side-effects must be converted to
17199 a valid statement before we get here. */
17200 gcc_unreachable ();
17203 *expr_p = NULL;
17205 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17206 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17207 && !is_empty_type (TREE_TYPE (*expr_p)))
17209 /* Historically, the compiler has treated a bare reference
17210 to a non-BLKmode volatile lvalue as forcing a load. */
17211 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17213 /* Normally, we do not want to create a temporary for a
17214 TREE_ADDRESSABLE type because such a type should not be
17215 copied by bitwise-assignment. However, we make an
17216 exception here, as all we are doing is ensuring that
17217 we read the bytes that make up the type. We use
17218 create_tmp_var_raw because create_tmp_var will abort when
17219 given a TREE_ADDRESSABLE type. */
17220 tree tmp = create_tmp_var_raw (type, "vol");
17221 gimple_add_tmp_var (tmp);
17222 gimplify_assign (tmp, *expr_p, pre_p);
17223 *expr_p = NULL;
17225 else
17226 /* We can't do anything useful with a volatile reference to
17227 an incomplete type, so just throw it away. Likewise for
17228 a BLKmode type, since any implicit inner load should
17229 already have been turned into an explicit one by the
17230 gimplification process. */
17231 *expr_p = NULL;
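/* E.g. for the load-forcing branch above, an expression statement

     *(volatile int *) p;

   with no value requested still emits

     vol.N = *(volatile int *) p;

   preserving the historical "a bare volatile lvalue performs a load"
   semantics.  */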
17234 /* If we are gimplifying at the statement level, we're done. Tack
17235 everything together and return. */
17236 if (fallback == fb_none || is_statement)
17238 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17239 it out for GC to reclaim it. */
17240 *expr_p = NULL_TREE;
17242 if (!gimple_seq_empty_p (internal_pre)
17243 || !gimple_seq_empty_p (internal_post))
17245 gimplify_seq_add_seq (&internal_pre, internal_post);
17246 gimplify_seq_add_seq (pre_p, internal_pre);
17249 /* The result of gimplifying *EXPR_P is going to be the last few
17250 statements in *PRE_P and *POST_P. Add location information
17251 to all the statements that were added by the gimplification
17252 helpers. */
17253 if (!gimple_seq_empty_p (*pre_p))
17254 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17256 if (!gimple_seq_empty_p (*post_p))
17257 annotate_all_with_location_after (*post_p, post_last_gsi,
17258 input_location);
17260 goto out;
17263 #ifdef ENABLE_GIMPLE_CHECKING
17264 if (*expr_p)
17266 enum tree_code code = TREE_CODE (*expr_p);
17267 /* These expressions should already be in gimple IR form. */
17268 gcc_assert (code != MODIFY_EXPR
17269 && code != ASM_EXPR
17270 && code != BIND_EXPR
17271 && code != CATCH_EXPR
17272 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17273 && code != EH_FILTER_EXPR
17274 && code != GOTO_EXPR
17275 && code != LABEL_EXPR
17276 && code != LOOP_EXPR
17277 && code != SWITCH_EXPR
17278 && code != TRY_FINALLY_EXPR
17279 && code != EH_ELSE_EXPR
17280 && code != OACC_PARALLEL
17281 && code != OACC_KERNELS
17282 && code != OACC_SERIAL
17283 && code != OACC_DATA
17284 && code != OACC_HOST_DATA
17285 && code != OACC_DECLARE
17286 && code != OACC_UPDATE
17287 && code != OACC_ENTER_DATA
17288 && code != OACC_EXIT_DATA
17289 && code != OACC_CACHE
17290 && code != OMP_CRITICAL
17291 && code != OMP_FOR
17292 && code != OACC_LOOP
17293 && code != OMP_MASTER
17294 && code != OMP_MASKED
17295 && code != OMP_TASKGROUP
17296 && code != OMP_ORDERED
17297 && code != OMP_PARALLEL
17298 && code != OMP_SCAN
17299 && code != OMP_SECTIONS
17300 && code != OMP_SECTION
17301 && code != OMP_SINGLE
17302 && code != OMP_SCOPE);
17304 #endif
17306 /* Otherwise we're gimplifying a subexpression, so the resulting
17307 value is interesting. If it's a valid operand that matches
17308 GIMPLE_TEST_F, we're done. Unless we are handling some
17309 post-effects internally; if that's the case, we need to copy into
17310 a temporary before adding the post-effects to POST_P. */
17311 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17312 goto out;
17314 /* Otherwise, we need to create a new temporary for the gimplified
17315 expression. */
17317 /* We can't return an lvalue if we have an internal postqueue. The
17318 object the lvalue refers to would (probably) be modified by the
17319 postqueue; we need to copy the value out first, which means an
17320 rvalue. */
17321 if ((fallback & fb_lvalue)
17322 && gimple_seq_empty_p (internal_post)
17323 && is_gimple_addressable (*expr_p))
17325 /* An lvalue will do. Take the address of the expression, store it
17326 in a temporary, and replace the expression with an INDIRECT_REF of
17327 that temporary. */
17328 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17329 unsigned int ref_align = get_object_alignment (*expr_p);
17330 tree ref_type = TREE_TYPE (*expr_p);
17331 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17332 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17333 if (TYPE_ALIGN (ref_type) != ref_align)
17334 ref_type = build_aligned_type (ref_type, ref_align);
17335 *expr_p = build2 (MEM_REF, ref_type,
17336 tmp, build_zero_cst (ref_alias_type));
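/* The branch above hands back an lvalue by address: the expression E
   is replaced with roughly

     tmp = &E;  ...  MEM[(T *) tmp]

   keeping the original reference's alias type and alignment, so later
   stores through the result still target the right object.  */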
17338 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17340 /* An rvalue will do. Assign the gimplified expression into a
17341 new temporary TMP and replace the original expression with
17342 TMP. First, make sure that the expression has a type so that
17343 it can be assigned into a temporary. */
17344 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17345 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17347 else
17349 #ifdef ENABLE_GIMPLE_CHECKING
17350 if (!(fallback & fb_mayfail))
17352 fprintf (stderr, "gimplification failed:\n");
17353 print_generic_expr (stderr, *expr_p);
17354 debug_tree (*expr_p);
17355 internal_error ("gimplification failed");
17357 #endif
17358 gcc_assert (fallback & fb_mayfail);
17360 /* If this is an asm statement, and the user asked for the
17361 impossible, don't die. Fail and let gimplify_asm_expr
17362 issue an error. */
17363 ret = GS_ERROR;
17364 goto out;
17367 /* Make sure the temporary matches our predicate. */
17368 gcc_assert ((*gimple_test_f) (*expr_p));
17370 if (!gimple_seq_empty_p (internal_post))
17372 annotate_all_with_location (internal_post, input_location);
17373 gimplify_seq_add_seq (pre_p, internal_post);
17376 out:
17377 input_location = saved_location;
17378 return ret;
17381 /* Like gimplify_expr but make sure the gimplified result is not itself
17382 an SSA name (a decl-backed temporary is used instead). Temporaries
17383 required by evaluating *EXPR_P may still be SSA names. */
17385 static enum gimplify_status
17386 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17387 bool (*gimple_test_f) (tree), fallback_t fallback,
17388 bool allow_ssa)
17390 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17391 gimple_test_f, fallback);
17392 if (! allow_ssa
17393 && TREE_CODE (*expr_p) == SSA_NAME)
17394 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17395 return ret;
17398 /* Look through TYPE for variable-sized objects and gimplify each such
17399 size that we find. Add to LIST_P any statements generated. */
17401 void
17402 gimplify_type_sizes (tree type, gimple_seq *list_p)
17404 if (type == NULL || type == error_mark_node)
17405 return;
17407 const bool ignored_p
17408 = TYPE_NAME (type)
17409 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17410 && DECL_IGNORED_P (TYPE_NAME (type));
17411 tree t;
17413 /* We first do the main variant, then copy into any other variants. */
17414 type = TYPE_MAIN_VARIANT (type);
17416 /* Avoid infinite recursion. */
17417 if (TYPE_SIZES_GIMPLIFIED (type))
17418 return;
17420 TYPE_SIZES_GIMPLIFIED (type) = 1;
17422 switch (TREE_CODE (type))
17424 case INTEGER_TYPE:
17425 case ENUMERAL_TYPE:
17426 case BOOLEAN_TYPE:
17427 case REAL_TYPE:
17428 case FIXED_POINT_TYPE:
17429 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17430 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17432 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17434 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17435 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17437 break;
17439 case ARRAY_TYPE:
17440 /* These types may not have declarations, so handle them here. */
17441 gimplify_type_sizes (TREE_TYPE (type), list_p);
17442 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17443 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
17444 with assigned stack slots; for -O1+ -g they should be tracked
17445 by VTA. */
17446 if (!ignored_p
17447 && TYPE_DOMAIN (type)
17448 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17450 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17451 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17452 DECL_IGNORED_P (t) = 0;
17453 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17454 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17455 DECL_IGNORED_P (t) = 0;
17457 break;
17459 case RECORD_TYPE:
17460 case UNION_TYPE:
17461 case QUAL_UNION_TYPE:
17462 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17463 if (TREE_CODE (field) == FIELD_DECL)
17465 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17466 /* Likewise, ensure variable offsets aren't removed. */
17467 if (!ignored_p
17468 && (t = DECL_FIELD_OFFSET (field))
17469 && VAR_P (t)
17470 && DECL_ARTIFICIAL (t))
17471 DECL_IGNORED_P (t) = 0;
17472 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17473 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17474 gimplify_type_sizes (TREE_TYPE (field), list_p);
17476 break;
17478 case POINTER_TYPE:
17479 case REFERENCE_TYPE:
17480 /* We used to recurse on the pointed-to type here, which turned out to
17481 be incorrect because its definition might refer to variables not
17482 yet initialized at this point if a forward declaration is involved.
17484 It was actually useful for anonymous pointed-to types to ensure
17485 that the sizes evaluation dominates every possible later use of the
17486 values. Restricting to such types here would be safe since there
17487 is no possible forward declaration around, but would introduce an
17488 undesirable middle-end semantic to anonymity. We then defer to
17489 front-ends the responsibility of ensuring that the sizes are
17490 evaluated both early and late enough, e.g. by attaching artificial
17491 type declarations to the tree. */
17492 break;
17494 default:
17495 break;
17498 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17499 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17501 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17503 TYPE_SIZE (t) = TYPE_SIZE (type);
17504 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17505 TYPE_SIZES_GIMPLIFIED (t) = 1;
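/* For instance, for a VLA type such as "char buf[n + 1]" the walk
   above gimplifies the domain bounds and TYPE_SIZE/TYPE_SIZE_UNIT
   into statements appended to LIST_P, e.g. (names illustrative)

     D.1 = n;  D.2 = D.1 + 1;  ...

   so that all later uses of the size are simple gimple values, shared
   across the type's variants.  */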
17509 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17510 a size or position, has had all of its SAVE_EXPRs evaluated.
17511 We add any required statements to *STMT_P. */
17513 void
17514 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17516 tree expr = *expr_p;
17518 /* We don't do anything if the value isn't there, is constant, or contains
17519 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17520 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17521 will want to replace it with a new variable, but that will cause problems
17522 if this type is from outside the function. It's OK to have that here. */
17523 if (expr == NULL_TREE
17524 || is_gimple_constant (expr)
17525 || TREE_CODE (expr) == VAR_DECL
17526 || CONTAINS_PLACEHOLDER_P (expr))
17527 return;
17529 *expr_p = unshare_expr (expr);
17531 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17532 if the def vanishes. */
17533 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17535 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17536 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
17537 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17538 if (is_gimple_constant (*expr_p))
17539 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17542 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17543 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17544 is true, also gimplify the parameters. */
17546 gbind *
17547 gimplify_body (tree fndecl, bool do_parms)
17549 location_t saved_location = input_location;
17550 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17551 gimple *outer_stmt;
17552 gbind *outer_bind;
17554 timevar_push (TV_TREE_GIMPLIFY);
17556 init_tree_ssa (cfun);
17558 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17559 gimplification. */
17560 default_rtl_profile ();
17562 gcc_assert (gimplify_ctxp == NULL);
17563 push_gimplify_context (true);
17565 if (flag_openacc || flag_openmp)
17567 gcc_assert (gimplify_omp_ctxp == NULL);
17568 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17569 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17572 /* Unshare most shared trees in the body and in that of any nested functions.
17573 It would seem we don't have to do this for nested functions because
17574 they are supposed to be output and then the outer function gimplified
17575 first, but the g++ front end doesn't always do it that way. */
17576 unshare_body (fndecl);
17577 unvisit_body (fndecl);
17579 /* Make sure input_location isn't set to something weird. */
17580 input_location = DECL_SOURCE_LOCATION (fndecl);
17582 /* Resolve callee-copies. This has to be done before processing
17583 the body so that DECL_VALUE_EXPR gets processed correctly. */
17584 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17586 /* Gimplify the function's body. */
17587 seq = NULL;
17588 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17589 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17590 if (!outer_stmt)
17592 outer_stmt = gimple_build_nop ();
17593 gimplify_seq_add_stmt (&seq, outer_stmt);
17596 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17597 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17598 if (gimple_code (outer_stmt) == GIMPLE_BIND
17599 && (gimple_seq_first_nondebug_stmt (seq)
17600 == gimple_seq_last_nondebug_stmt (seq)))
17602 outer_bind = as_a <gbind *> (outer_stmt);
17603 if (gimple_seq_first_stmt (seq) != outer_stmt
17604 || gimple_seq_last_stmt (seq) != outer_stmt)
17606 /* If there are debug stmts before or after outer_stmt, move them
17607 inside of outer_bind body. */
17608 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17609 gimple_seq second_seq = NULL;
17610 if (gimple_seq_first_stmt (seq) != outer_stmt
17611 && gimple_seq_last_stmt (seq) != outer_stmt)
17613 second_seq = gsi_split_seq_after (gsi);
17614 gsi_remove (&gsi, false);
17616 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17617 gsi_remove (&gsi, false);
17618 else
17620 gsi_remove (&gsi, false);
17621 second_seq = seq;
17622 seq = NULL;
17624 gimple_seq_add_seq_without_update (&seq,
17625 gimple_bind_body (outer_bind));
17626 gimple_seq_add_seq_without_update (&seq, second_seq);
17627 gimple_bind_set_body (outer_bind, seq);
17630 else
17631 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17633 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17635 /* If we had callee-copies statements, insert them at the beginning
17636 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17637 if (!gimple_seq_empty_p (parm_stmts))
17639 tree parm;
17641 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17642 if (parm_cleanup)
17644 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17645 GIMPLE_TRY_FINALLY);
17646 parm_stmts = NULL;
17647 gimple_seq_add_stmt (&parm_stmts, g);
17649 gimple_bind_set_body (outer_bind, parm_stmts);
17651 for (parm = DECL_ARGUMENTS (current_function_decl);
17652 parm; parm = DECL_CHAIN (parm))
17653 if (DECL_HAS_VALUE_EXPR_P (parm))
17655 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17656 DECL_IGNORED_P (parm) = 0;
17660 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17661 && gimplify_omp_ctxp)
17663 delete_omp_context (gimplify_omp_ctxp);
17664 gimplify_omp_ctxp = NULL;
17667 pop_gimplify_context (outer_bind);
17668 gcc_assert (gimplify_ctxp == NULL);
17670 if (flag_checking && !seen_error ())
17671 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17673 timevar_pop (TV_TREE_GIMPLIFY);
17674 input_location = saved_location;
17676 return outer_bind;
17679 typedef char *char_p; /* For DEF_VEC_P. */
17681 /* Return whether we should exclude FNDECL from instrumentation. */
17683 static bool
17684 flag_instrument_functions_exclude_p (tree fndecl)
17686 vec<char_p> *v;
17688 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17689 if (v && v->length () > 0)
17691 const char *name;
17692 int i;
17693 char *s;
17695 name = lang_hooks.decl_printable_name (fndecl, 1);
17696 FOR_EACH_VEC_ELT (*v, i, s)
17697 if (strstr (name, s) != NULL)
17698 return true;
17701 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17702 if (v && v->length () > 0)
17704 const char *name;
17705 int i;
17706 char *s;
17708 name = DECL_SOURCE_FILE (fndecl);
17709 FOR_EACH_VEC_ELT (*v, i, s)
17710 if (strstr (name, s) != NULL)
17711 return true;
17714 return false;
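/* Note the substring semantics of the match: with
   -finstrument-functions-exclude-function-list=detail::, any function
   whose printable name contains "detail::" is excluded, not just
   exact matches; likewise for the file-based list.  */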
17717 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17718 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17719 the instrumentation function. If STMT is not NULL, it is a statement
17720 to be executed just before the call to the instrumentation function. */
17722 static void
17723 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17724 tree cond_var, gimple *stmt)
17726 /* The instrumentation hooks aren't going to call the instrumented
17727 function and the address they receive is expected to be matchable
17728 against symbol addresses. Make sure we don't create a trampoline,
17729 in case the current function is nested. */
17730 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17731 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17733 tree label_true, label_false;
17734 if (cond_var)
17736 label_true = create_artificial_label (UNKNOWN_LOCATION);
17737 label_false = create_artificial_label (UNKNOWN_LOCATION);
17738 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17739 label_true, label_false);
17740 gimplify_seq_add_stmt (seq, cond);
17741 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17742 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17743 NOT_TAKEN));
17746 if (stmt)
17747 gimplify_seq_add_stmt (seq, stmt);
17749 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17750 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17751 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17752 gimple_call_set_lhs (call, tmp_var);
17753 gimplify_seq_add_stmt (seq, call);
17754 x = builtin_decl_implicit (fncode);
17755 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17756 gimplify_seq_add_stmt (seq, call);
17758 if (cond_var)
17759 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
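/* The emitted hook call has the shape (for the enter case)

     t = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn, t);

   optionally preceded by STMT and guarded by COND_VAR for
   -finstrument-functions-once.  */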
17762 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17763 node for the function we want to gimplify.
17765 Return the sequence of GIMPLE statements corresponding to the body
17766 of FNDECL. */
17768 void
17769 gimplify_function_tree (tree fndecl)
17771 gimple_seq seq;
17772 gbind *bind;
17774 gcc_assert (!gimple_body (fndecl));
17776 if (DECL_STRUCT_FUNCTION (fndecl))
17777 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17778 else
17779 push_struct_function (fndecl);
17781 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17782 if necessary. */
17783 cfun->curr_properties |= PROP_gimple_lva;
17785 if (asan_sanitize_use_after_scope ())
17786 asan_poisoned_variables = new hash_set<tree> ();
17787 bind = gimplify_body (fndecl, true);
17788 if (asan_poisoned_variables)
17790 delete asan_poisoned_variables;
17791 asan_poisoned_variables = NULL;
17794 /* The tree body of the function is no longer needed, replace it
17795 with the new GIMPLE body. */
17796 seq = NULL;
17797 gimple_seq_add_stmt (&seq, bind);
17798 gimple_set_body (fndecl, seq);
17800 /* If we're instrumenting function entry/exit, then prepend the call to
17801 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17802 catch the exit hook. */
17803 /* ??? Add some way to ignore exceptions for this TFE. */
17804 if (flag_instrument_function_entry_exit
17805 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
17806 /* Do not instrument extern inline functions. */
17807 && !(DECL_DECLARED_INLINE_P (fndecl)
17808 && DECL_EXTERNAL (fndecl)
17809 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
17810 && !flag_instrument_functions_exclude_p (fndecl))
17812 gimple_seq body = NULL, cleanup = NULL;
17813 gassign *assign;
17814 tree cond_var;
17816 /* If -finstrument-functions-once is specified, generate:
17818 static volatile bool C.0 = false;
17819 bool tmp_called;
17821 tmp_called = C.0;
17822 if (!tmp_called)
17824 C.0 = true;
17825 [call profiling enter function]
17828 without specific protection for data races. */
17829 if (flag_instrument_function_entry_exit > 1)
17831 tree first_var
17832 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
17833 VAR_DECL,
17834 create_tmp_var_name ("C"),
17835 boolean_type_node);
17836 DECL_ARTIFICIAL (first_var) = 1;
17837 DECL_IGNORED_P (first_var) = 1;
17838 TREE_STATIC (first_var) = 1;
17839 TREE_THIS_VOLATILE (first_var) = 1;
17840 TREE_USED (first_var) = 1;
17841 DECL_INITIAL (first_var) = boolean_false_node;
17842 varpool_node::add (first_var);
17844 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
17845 assign = gimple_build_assign (cond_var, first_var);
17846 gimplify_seq_add_stmt (&body, assign);
17848 assign = gimple_build_assign (first_var, boolean_true_node);
17851 else
17853 cond_var = NULL_TREE;
17854 assign = NULL;
17857 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
17858 cond_var, assign);
17860 /* If -finstrument-functions-once is specified, generate:
17862 if (!tmp_called)
17863 [call profiling exit function]
17865 without specific protection for data races. */
17866 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
17867 cond_var, NULL);
17869 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
17870 gimplify_seq_add_stmt (&body, tf);
17871 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
17873 /* Replace the current function body with the body
17874 wrapped in the try/finally TF. */
17875 seq = NULL;
17876 gimple_seq_add_stmt (&seq, new_bind);
17877 gimple_set_body (fndecl, seq);
17878 bind = new_bind;
17881 if (sanitize_flags_p (SANITIZE_THREAD)
17882 && param_tsan_instrument_func_entry_exit)
17884 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
17885 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
17886 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
17887 /* Replace the current function body with the body
17888 wrapped in the try/finally TF. */
17889 seq = NULL;
17890 gimple_seq_add_stmt (&seq, new_bind);
17891 gimple_set_body (fndecl, seq);
17894 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17895 cfun->curr_properties |= PROP_gimple_any;
17897 pop_cfun ();
17899 dump_function (TDI_gimple, fndecl);
17902 /* Return a dummy expression of type TYPE in order to keep going after an
17903 error. */
17905 static tree
17906 dummy_object (tree type)
17908 tree t = build_int_cst (build_pointer_type (type), 0);
17909 return build2 (MEM_REF, type, t, t);
17912 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
17913 builtin function, but a very special sort of operator. */
17915 enum gimplify_status
17916 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
17917 gimple_seq *post_p ATTRIBUTE_UNUSED)
17919 tree promoted_type, have_va_type;
17920 tree valist = TREE_OPERAND (*expr_p, 0);
17921 tree type = TREE_TYPE (*expr_p);
17922 tree t, tag, aptag;
17923 location_t loc = EXPR_LOCATION (*expr_p);
17925 /* Verify that valist is of the proper type. */
17926 have_va_type = TREE_TYPE (valist);
17927 if (have_va_type == error_mark_node)
17928 return GS_ERROR;
17929 have_va_type = targetm.canonical_va_list_type (have_va_type);
17930 if (have_va_type == NULL_TREE
17931 && POINTER_TYPE_P (TREE_TYPE (valist)))
17932 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
17933 have_va_type
17934 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
17935 gcc_assert (have_va_type != NULL_TREE);
17937 /* Generate a diagnostic for requesting data of a type that cannot
17938 be passed through `...' due to type promotion at the call site. */
17939 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
17940 != type)
17942 static bool gave_help;
17943 bool warned;
17944 /* Use the expansion point to handle cases such as passing bool (defined
17945 in a system header) through `...'. */
17946 location_t xloc
17947 = expansion_point_location_if_in_system_header (loc);
17949 /* Unfortunately, this is merely undefined, rather than a constraint
17950 violation, so we cannot make this an error. If this call is never
17951 executed, the program is still strictly conforming. */
17952 auto_diagnostic_group d;
17953 warned = warning_at (xloc, 0,
17954 "%qT is promoted to %qT when passed through %<...%>",
17955 type, promoted_type);
17956 if (!gave_help && warned)
17958 gave_help = true;
17959 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
17960 promoted_type, type);
17963 /* We can, however, treat "undefined" any way we please.
17964 Emit a trap to encourage the user to fix the program. */
17965 if (warned)
17966 inform (xloc, "if this code is reached, the program will abort");
17967 /* Before the abort, allow the evaluation of the va_list
17968 expression to exit or longjmp. */
17969 gimplify_and_add (valist, pre_p);
17970 t = build_call_expr_loc (loc,
17971 builtin_decl_implicit (BUILT_IN_TRAP), 0);
17972 gimplify_and_add (t, pre_p);
17974 /* This is dead code, but go ahead and finish so that the
17975 mode of the result comes out right. */
17976 *expr_p = dummy_object (type);
17977 return GS_ALL_DONE;
17980 tag = build_int_cst (build_pointer_type (type), 0);
17981 aptag = build_int_cst (TREE_TYPE (valist), 0);
17983 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
17984 valist, tag, aptag);
17986 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
17987 needs to be expanded. */
17988 cfun->curr_properties &= ~PROP_gimple_lva;
17990 return GS_OK;
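/* E.g. "va_arg (ap, int)" is rewritten here into an internal call,
   shown in dumps roughly as

     .VA_ARG (&ap, 0B, 0B);

   where the two null "tag" operands carry the requested type and the
   va_list type; a later pass (see PROP_gimple_lva) expands it into
   target-specific pointer arithmetic.  */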
17993 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
17995 DST/SRC are the destination and source respectively. You can pass
17996 ungimplified trees in DST or SRC, in which case they will be
17997 converted to a gimple operand if necessary.
17999 This function returns the newly created GIMPLE_ASSIGN tuple. */
18001 gimple *
18002 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18004 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18005 gimplify_and_add (t, seq_p);
18006 ggc_free (t);
18007 return gimple_seq_last_stmt (*seq_p);
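/* Typical middle-end usage of gimplify_assign, as a sketch:

     gimple_seq seq = NULL;
     gimplify_assign (dst, build2 (PLUS_EXPR, type, a, b), &seq);

   the RHS is gimplified as needed and the final GIMPLE_ASSIGN, which
   is also the return value, is appended to SEQ.  */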
18010 inline hashval_t
18011 gimplify_hasher::hash (const elt_t *p)
18013 tree t = p->val;
18014 return iterative_hash_expr (t, 0);
18017 inline bool
18018 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18020 tree t1 = p1->val;
18021 tree t2 = p2->val;
18022 enum tree_code code = TREE_CODE (t1);
18024 if (TREE_CODE (t2) != code
18025 || TREE_TYPE (t1) != TREE_TYPE (t2))
18026 return false;
18028 if (!operand_equal_p (t1, t2, 0))
18029 return false;
18031 /* Only allow them to compare equal if they also hash equal; otherwise
18032 results are nondeterministic, and we fail bootstrap comparison. */
18033 gcc_checking_assert (hash (p1) == hash (p2));
18035 return true;