/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"
/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an 'always, to' or 'always, tofrom'
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}
/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
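/* Keying the comparison on DECL_UID rather than on pointer values keeps
   the splay tree order reproducible from run to run, so passes that walk
   the tree of variables produce stable output.  */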
/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
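/* For example, when optimizing, two requests for a formal temporary
   holding the same value expression hit the same TEMP_HTAB slot, so the
   second request reuses the temporary created for the first instead of
   materializing a fresh D.NNNN variable.  */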
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions,
   it is probably easier for a front-end to unshare the expressions manually.
   On the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
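/* As a concrete illustration, a size expression such as SAVE_EXPR <n * 4>
   may be referenced both from an array type and from the DECL_SIZE of a
   variable of that type; gimplifying one reference in place would also
   rewrite the other, so the second reference must be replaced by a copy
   first.  */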
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of that EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
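/* E.g. for a STATEMENT_LIST of the form

     DEBUG_BEGIN_STMT
     DEBUG_BEGIN_STMT
     x = 1;

   the location returned is that of 'x = 1;'.  */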
/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
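/* For instance, for a GNU statement expression used as an initializer,

     int x = ({ f (); 42; });

   the wrappers are given void type and the trailing value '42' is
   captured through a temporary such as the 'retval' created above.  */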
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
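/* The resulting pair corresponds to

     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);

   which gimplify_bind_expr wraps in a GIMPLE_TRY_FINALLY when the block
   allocates variable-sized storage.  */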
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of a DECL
   variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate an IFN_ASAN_MARK call that either poisons or unpoisons, depending
   on the POISON flag, the shadow memory of a DECL variable.  The call is
   inserted at the location identified by the iterator IT, with the BEFORE
   flag deciding whether it is inserted before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
/* Generate an IFN_ASAN_MARK internal call that either poisons or unpoisons
   a DECL, depending on the POISON flag.  The created statement is appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}
/* Generate an IFN_ASAN_MARK internal call for all VARIABLES, either
   poisoning or unpoisoning them depending on the POISON flag.  The created
   statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
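/* Sorting by DECL_UID above makes the emitted ASAN_MARK sequence
   independent of hash-set iteration order, keeping dumps and code
   generation reproducible across runs.  */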
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are within a conditional context, add a PREDICT statement
     marking the early return as unlikely to be taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
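/* For example, a declaration 'int a[n];' is lowered so that the storage
   comes from an alloca call, roughly

     a.N = __builtin_alloca_with_align (SIZE, ALIGN);

   and every later use of 'a' is rewritten to '*a.N' through the
   DECL_VALUE_EXPR set above.  */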
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}
/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
      sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
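/* With -ftrivial-auto-var-init=zero, for instance, an uninitialized
   'int i;' is initialized as

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   which later expansion turns into an actual block initialization.  */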
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-init with fewer initializers than members of
   the aggregate will initialize the rest of the aggregate as if it were
   static initialization.  In turn static initialization guarantees that
   padding is initialized to zero.  So, we always initialize paddings
   to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init', to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}
/* Return true if the DECL needs to be automatically initialized by the
   compiler.  */
static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1851 tree stmt = *stmt_p;
1852 tree decl = DECL_EXPR_DECL (stmt);
1854 *stmt_p = NULL_TREE;
1856 if (TREE_TYPE (decl) == error_mark_node)
1857 return GS_ERROR;
1859 if ((TREE_CODE (decl) == TYPE_DECL
1860 || VAR_P (decl))
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1863 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1864 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1879 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1881 tree init = DECL_INITIAL (decl);
1882 bool is_vla = false;
1883 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1884 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1885 If the decl has VALUE_EXPR that was created by FE (usually
1886 C++FE), it's a proxy varaible, and FE already initialized
1887 the VALUE_EXPR of it, we should not initialize it anymore. */
1888 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1890 poly_uint64 size;
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1892 || (!TREE_STATIC (decl)
1893 && flag_stack_check == GENERIC_STACK_CHECK
1894 && maybe_gt (size,
1895 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1897 gimplify_vla_decl (decl, seq_p);
1898 is_vla = true;
1901 if (asan_poisoned_variables
1902 && !is_vla
1903 && TREE_ADDRESSABLE (decl)
1904 && !TREE_STATIC (decl)
1905 && !DECL_HAS_VALUE_EXPR_P (decl)
1906 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1914 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1916 asan_poisoned_variables->add (decl);
1917 asan_poison_variable (decl, false, seq_p);
1918 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1919 gimplify_ctxp->live_switch_vars->add (decl);
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1927 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1928 gimple_add_tmp_var (decl);
1930 if (init && init != error_mark_node)
1932 if (!TREE_STATIC (decl))
1934 DECL_INITIAL (decl) = NULL_TREE;
1935 init = build2 (INIT_EXPR, void_type_node, decl, init);
1936 gimplify_and_add (init, seq_p);
1937 ggc_free (init);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl)
1940 && !omp_privatize_by_reference (decl))
1941 TREE_READONLY (decl) = 0;
1943 else
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init, force_labels_r, NULL, NULL);
1948 /* When there is no explicit initializer, insert an artificial
1949 initializer for this automatic variable if the user requested
1950 one.  */
1951 else if (is_var_need_auto_init (decl)
1952 && !decl_had_value_expr_p)
1954 gimple_add_init_for_auto_var (decl,
1955 flag_auto_var_init,
1956 seq_p);
1957 /* Expanding the call to the above .DEFERRED_INIT applies block
1958 initialization to the whole space covered by this variable.
1959 As a result, all the padding will be initialized to zeroes
1960 for zero initialization and to 0xFE byte-repeatable patterns for
1961 pattern initialization.
1962 In order to make the padding zero for pattern init as well, we
1963 should add a call to __builtin_clear_padding to clear the
1964 padding to zero, which is compatible with Clang.
1965 We cannot insert this call if the variable is a gimple register,
1966 since __builtin_clear_padding will take the address of the
1967 variable.  As a result, if a long double/_Complex long double
1968 variable is spilled onto the stack later, its padding is 0xFE.  */
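/* An illustrative sketch (the exact dump may differ by target): on
   x86_64 with -ftrivial-auto-var-init=pattern, a declaration
     long double d;
   is lowered to roughly
     d = .DEFERRED_INIT (16, 2, &"d"[0]);
     __builtin_clear_padding (&d, ...);
   so the value bits get the 0xFE pattern while the padding bits
   are cleared to zero.  */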
1969 if (flag_auto_var_init == AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1972 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1976 return GS_ALL_DONE;
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
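/* Illustratively, a LOOP_EXPR of the form
     loop { body; }
   is lowered to roughly
     start:
     body;
     goto start;
   and when the body contains an EXIT_EXPR, an exit label is
   appended after the goto.  */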
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1986 tree saved_label = gimplify_ctxp->exit_label;
1987 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1989 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1991 gimplify_ctxp->exit_label = NULL_TREE;
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1995 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1997 if (gimplify_ctxp->exit_label)
1998 gimplify_seq_add_stmt (pre_p,
1999 gimple_build_label (gimplify_ctxp->exit_label));
2001 gimplify_ctxp->exit_label = saved_label;
2003 *expr_p = NULL;
2004 return GS_ALL_DONE;
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2010 static enum gimplify_status
2011 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2013 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2015 tree_stmt_iterator i = tsi_start (*expr_p);
2017 while (!tsi_end_p (i))
2019 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2020 tsi_delink (&i);
2023 if (temp)
2025 *expr_p = temp;
2026 return GS_OK;
2029 return GS_ALL_DONE;
2033 /* Emit a warning for the unreachable statement STMT if needed.
2034 Return the statement itself when the warning is emitted, otherwise
2035 return NULL.  */
2036 static gimple *
2037 emit_warn_switch_unreachable (gimple *stmt)
2039 if (gimple_code (stmt) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2044 return NULL;
2045 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2046 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2047 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2049 || (is_gimple_assign (stmt)
2050 && gimple_assign_single_p (stmt)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2054 IFN_DEFERRED_INIT))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2057 There are 3 cases:
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding whose 2nd argument is
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2064 i1 = _1. */
2065 return NULL;
2066 else
2067 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2068 "statement will never be executed");
2069 return stmt;
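/* An illustrative case diagnosed by the warning above:
     switch (x)
       {
         i = 1;      <-- can never be executed
       case 0:
         break;
       }  */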
2072 /* Callback for walk_gimple_seq. */
2074 static tree
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2076 bool *handled_ops_p,
2077 struct walk_stmt_info *wi)
2079 gimple *stmt = gsi_stmt (*gsi_p);
2080 bool unreachable_issued = wi->info != NULL;
2082 *handled_ops_p = true;
2083 switch (gimple_code (stmt))
2085 case GIMPLE_TRY:
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt) == NULL)
2091 if (warn_switch_unreachable && !unreachable_issued)
2092 wi->info = emit_warn_switch_unreachable (stmt);
2094 /* Stop when the auto-var-init warning is not enabled.  */
2095 if (!warn_trivial_auto_var_init)
2096 return integer_zero_node;
2098 /* Fall through. */
2099 case GIMPLE_BIND:
2100 case GIMPLE_CATCH:
2101 case GIMPLE_EH_FILTER:
2102 case GIMPLE_TRANSACTION:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p = false;
2105 break;
2107 case GIMPLE_DEBUG:
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2111 break;
2113 case GIMPLE_LABEL:
2114 /* Stop at the first label.  */
2115 return integer_zero_node;
2116 case GIMPLE_CALL:
2117 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2119 *handled_ops_p = false;
2120 break;
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2126 /* Get the variable name from the 3rd argument of the call.  */
2127 tree var_name = gimple_call_arg (stmt, 2);
2128 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2129 const char *var_name_str = TREE_STRING_POINTER (var_name);
2131 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2132 "%qs cannot be initialized with"
2133 "%<-ftrivial-auto-var_init%>",
2134 var_name_str);
2135 break;
2138 /* Fall through. */
2139 default:
2140 /* Check the first "real" statement (not a decl/lexical scope/...), and
2141 issue a warning if needed.  */
2142 if (warn_switch_unreachable && !unreachable_issued)
2143 wi->info = emit_warn_switch_unreachable (stmt);
2144 /* Stop when the auto-var-init warning is not enabled.  */
2145 if (!warn_trivial_auto_var_init)
2146 return integer_zero_node;
2147 break;
2149 return NULL_TREE;
2153 /* Possibly warn about unreachable statements between a switch's controlling
2154 expression and the first case.  Also warn when -ftrivial-auto-var-init
2155 cannot initialize the auto variable in such a situation.
2156 SEQ is the body of a switch expression.  */
2158 static void
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2161 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2162 /* This warning doesn't play well with Fortran when optimizations
2163 are on. */
2164 || lang_GNU_Fortran ()
2165 || seq == NULL)
2166 return;
2168 struct walk_stmt_info wi;
2170 memset (&wi, 0, sizeof (wi));
2171 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2175 /* A label entry that pairs a label and a location.  */
2176 struct label_entry
2178 tree label;
2179 location_t loc;
2182 /* Find LABEL in vector of label entries VEC. */
2184 static struct label_entry *
2185 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2187 unsigned int i;
2188 struct label_entry *l;
2190 FOR_EACH_VEC_ELT (*vec, i, l)
2191 if (l->label == label)
2192 return l;
2193 return NULL;
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2199 static bool
2200 case_label_p (const vec<tree> *cases, tree label)
2202 unsigned int i;
2203 tree l;
2205 FOR_EACH_VEC_ELT (*cases, i, l)
2206 if (CASE_LABEL (l) == label)
2207 return true;
2208 return false;
2211 /* Find the last nondebug statement in a scope STMT. */
2213 static gimple *
2214 last_stmt_in_scope (gimple *stmt)
2216 if (!stmt)
2217 return NULL;
2219 switch (gimple_code (stmt))
2221 case GIMPLE_BIND:
2223 gbind *bind = as_a <gbind *> (stmt);
2224 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2225 return last_stmt_in_scope (stmt);
2228 case GIMPLE_TRY:
2230 gtry *try_stmt = as_a <gtry *> (stmt);
2231 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2232 gimple *last_eval = last_stmt_in_scope (stmt);
2233 if (gimple_stmt_may_fallthru (last_eval)
2234 && (last_eval == NULL
2235 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2236 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2238 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2239 return last_stmt_in_scope (stmt);
2241 else
2242 return last_eval;
2245 case GIMPLE_DEBUG:
2246 gcc_unreachable ();
2248 default:
2249 return stmt;
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2258 static gimple *
2259 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2260 auto_vec <struct label_entry> *labels,
2261 location_t *prevloc)
2263 gimple *prev = NULL;
2265 *prevloc = UNKNOWN_LOCATION;
2268 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2274 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2275 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2276 if (last
2277 && gimple_code (first) == GIMPLE_SWITCH
2278 && gimple_code (last) == GIMPLE_LABEL)
2280 tree label = gimple_label_label (as_a <glabel *> (last));
2281 if (SWITCH_BREAK_LABEL_P (label))
2283 prev = bind;
2284 gsi_next (gsi_p);
2285 continue;
2289 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2295 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2296 if (last)
2298 prev = last;
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev))
2302 *prevloc = bind_loc;
2304 gsi_next (gsi_p);
2305 continue;
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2311 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2312 tree false_lab = gimple_cond_false_label (cond_stmt);
2313 location_t if_loc = gimple_location (cond_stmt);
2315 /* If we have e.g.
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab))
2319 break;
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2324 gimple *stmt = gsi_stmt (*gsi_p);
2325 if (gimple_code (stmt) == GIMPLE_LABEL
2326 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2327 break;
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p))
2332 break;
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab))
2337 struct label_entry l = { false_lab, if_loc };
2338 labels->safe_push (l);
2341 /* Go to the last statement of the then branch. */
2342 gsi_prev (gsi_p);
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2345 <D.1759>:
2346 <stmt>;
2347 goto <D.1761>;
2348 <D.1760>:
2350 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p)))
2353 /* Look at the statement before, it might be
2354 attribute fallthrough, in which case don't warn. */
2355 gsi_prev (gsi_p);
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2358 gsi_next (gsi_p);
2359 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2360 if (!fallthru_before_dest)
2362 struct label_entry l = { goto_dest, if_loc };
2363 labels->safe_push (l);
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2368 <D.2022>:
2369 n = n + 1; // #1
2370 <D.2023>: // #2
2371 <D.1988>: // #3
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab))
2375 prev = gsi_stmt (*gsi_p);
2377 /* And move back. */
2378 gsi_next (gsi_p);
2381 /* Remember the last statement. Skip labels that are of no interest
2382 to us. */
2383 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2385 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2386 if (find_label_entry (labels, label))
2387 prev = gsi_stmt (*gsi_p);
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2391 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2394 prev = gsi_stmt (*gsi_p);
2395 gsi_next (gsi_p);
2397 while (!gsi_end_p (*gsi_p)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p))));
2402 if (prev && gimple_has_location (prev))
2403 *prevloc = gimple_location (prev);
2404 return prev;
2407 /* Return true if the switch fallthrough warning should occur.  LABEL is
2408 the label statement that we're falling through to.  */
2410 static bool
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2413 gimple_stmt_iterator gsi = *gsi_p;
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label))
2417 return false;
2419 /* Don't warn for non-case labels followed by a statement:
2420 case 0:
2421 foo ();
2422 label:
2423 bar ();
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2427 tree l;
2428 while (!gsi_end_p (gsi)
2429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2430 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2431 && !case_label_p (&gimplify_ctxp->case_labels, l))
2432 gsi_next_nondebug (&gsi);
2433 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2434 return false;
2437 /* Don't warn for terminated branches, i.e. when the subsequent case label
2438 immediately breaks.  */
2439 gsi = *gsi_p;
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi)
2443 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2445 gsi_next_nondebug (&gsi);
2447 /* { ... something; default:; } */
2448 if (gsi_end_p (gsi)
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2454 return false;
2456 return true;
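/* Illustrative example for the predicate above: given
     case 0:
       foo ();
     case 1:
   the warning triggers, while marking the fall through explicitly
   with __attribute__((fallthrough)) (or C++ [[fallthrough]]) before
   the 'case 1:' label suppresses it.  */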
2459 /* Callback for walk_gimple_seq. */
2461 static tree
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2463 struct walk_stmt_info *)
2465 gimple *stmt = gsi_stmt (*gsi_p);
2467 *handled_ops_p = true;
2468 switch (gimple_code (stmt))
2470 case GIMPLE_TRY:
2471 case GIMPLE_BIND:
2472 case GIMPLE_CATCH:
2473 case GIMPLE_EH_FILTER:
2474 case GIMPLE_TRANSACTION:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p = false;
2477 break;
2479 /* Find a sequence of form:
2481 GIMPLE_LABEL
2482 [...]
2483 <may fallthru stmt>
2484 GIMPLE_LABEL
2486 and possibly warn. */
2487 case GIMPLE_LABEL:
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p)
2491 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2492 gsi_next_nondebug (gsi_p);
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p))
2496 return integer_zero_node;
2498 /* Vector of labels that fall through. */
2499 auto_vec <struct label_entry> labels;
2500 location_t prevloc;
2501 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p))
2505 return integer_zero_node;
2507 gimple *next = gsi_stmt (*gsi_p);
2508 tree label;
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next) == GIMPLE_LABEL
2511 && gimple_has_location (next)
2512 && (label = gimple_label_label (as_a <glabel *> (next)))
2513 && prev != NULL)
2515 struct label_entry *l;
2516 bool warned_p = false;
2517 auto_diagnostic_group d;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2519 /* Quiet. */;
2520 else if (gimple_code (prev) == GIMPLE_LABEL
2521 && (label = gimple_label_label (as_a <glabel *> (prev)))
2522 && (l = find_label_entry (&labels, label)))
2523 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev)
2529 && prevloc != UNKNOWN_LOCATION)
2530 warned_p = warning_at (prevloc,
2531 OPT_Wimplicit_fallthrough_,
2532 "this statement may fall through");
2533 if (warned_p)
2534 inform (gimple_location (next), "here");
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label) = true;
2540 /* So that next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2542 gsi_prev (gsi_p);
2545 break;
2546 default:
2547 break;
2549 return NULL_TREE;
2552 /* Warn when a switch case falls through. */
2554 static void
2555 maybe_warn_implicit_fallthrough (gimple_seq seq)
2557 if (!warn_implicit_fallthrough)
2558 return;
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2561 if (!(lang_GNU_C ()
2562 || lang_GNU_CXX ()
2563 || lang_GNU_OBJC ()))
2564 return;
2566 struct walk_stmt_info wi;
2567 memset (&wi, 0, sizeof (wi));
2568 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2571 /* Callback for walk_gimple_seq. */
2573 static tree
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2575 struct walk_stmt_info *wi)
2577 gimple *stmt = gsi_stmt (*gsi_p);
2579 *handled_ops_p = true;
2580 switch (gimple_code (stmt))
2582 case GIMPLE_TRY:
2583 case GIMPLE_BIND:
2584 case GIMPLE_CATCH:
2585 case GIMPLE_EH_FILTER:
2586 case GIMPLE_TRANSACTION:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p = false;
2589 break;
2590 case GIMPLE_CALL:
2591 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2593 gsi_remove (gsi_p, true);
2594 if (gsi_end_p (*gsi_p))
2596 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2597 return integer_zero_node;
2600 bool found = false;
2601 location_t loc = gimple_location (stmt);
2603 gimple_stmt_iterator gsi2 = *gsi_p;
2604 stmt = gsi_stmt (gsi2);
2605 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2607 /* Go on until the artificial label. */
2608 tree goto_dest = gimple_goto_dest (stmt);
2609 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2611 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2613 == goto_dest)
2614 break;
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2))
2619 break;
2621 /* Look one past it. */
2622 gsi_next (&gsi2);
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2))
2628 stmt = gsi_stmt (gsi2);
2629 if (gimple_code (stmt) == GIMPLE_LABEL)
2631 tree label = gimple_label_label (as_a <glabel *> (stmt));
2632 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2634 found = true;
2635 break;
2638 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2640 else if (!is_gimple_debug (stmt))
2641 /* Anything else is not expected. */
2642 break;
2643 gsi_next (&gsi2);
2645 if (!found)
2646 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2649 break;
2650 default:
2651 break;
2653 return NULL_TREE;
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2658 static void
2659 expand_FALLTHROUGH (gimple_seq *seq_p)
2661 struct walk_stmt_info wi;
2662 location_t loc;
2663 memset (&wi, 0, sizeof (wi));
2664 wi.info = (void *) &loc;
2665 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2666 if (wi.callback_result == integer_zero_node)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2675 branch to. */
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2680 tree switch_expr = *expr_p;
2681 gimple_seq switch_body_seq = NULL;
2682 enum gimplify_status ret;
2683 tree index_type = TREE_TYPE (switch_expr);
2684 if (index_type == NULL_TREE)
2685 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2687 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2688 fb_rvalue);
2689 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2690 return ret;
2692 if (SWITCH_BODY (switch_expr))
2694 vec<tree> labels;
2695 vec<tree> saved_labels;
2696 hash_set<tree> *saved_live_switch_vars = NULL;
2697 tree default_case = NULL_TREE;
2698 gswitch *switch_stmt;
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels = gimplify_ctxp->case_labels;
2703 gimplify_ctxp->case_labels.create (8);
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2707 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2708 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2709 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2710 else
2711 gimplify_ctxp->live_switch_vars = NULL;
2713 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2714 gimplify_ctxp->in_switch_expr = true;
2716 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2718 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2720 maybe_warn_implicit_fallthrough (switch_body_seq);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp->in_switch_expr)
2723 expand_FALLTHROUGH (&switch_body_seq);
2725 labels = gimplify_ctxp->case_labels;
2726 gimplify_ctxp->case_labels = saved_labels;
2728 if (gimplify_ctxp->live_switch_vars)
2730 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2731 delete gimplify_ctxp->live_switch_vars;
2733 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2735 preprocess_case_label_vec_for_gimple (labels, index_type,
2736 &default_case);
2738 bool add_bind = false;
2739 if (!default_case)
2741 glabel *new_default;
2743 default_case
2744 = build_case_label (NULL_TREE, NULL_TREE,
2745 create_artificial_label (UNKNOWN_LOCATION));
2746 if (old_in_switch_expr)
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2749 add_bind = true;
2751 new_default = gimple_build_label (CASE_LABEL (default_case));
2752 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2754 else if (old_in_switch_expr)
2756 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2757 if (last && gimple_code (last) == GIMPLE_LABEL)
2759 tree label = gimple_label_label (as_a <glabel *> (last));
2760 if (SWITCH_BREAK_LABEL_P (label))
2761 add_bind = true;
2765 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2766 default_case, labels);
2767 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2768 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2769 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2770 so that we can easily find the start and end of the switch
2771 statement. */
2772 if (add_bind)
2774 gimple_seq bind_body = NULL;
2775 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2776 gimple_seq_add_seq (&bind_body, switch_body_seq);
2777 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2778 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2779 gimplify_seq_add_stmt (pre_p, bind);
2781 else
2783 gimplify_seq_add_stmt (pre_p, switch_stmt);
2784 gimplify_seq_add_seq (pre_p, switch_body_seq);
2786 labels.release ();
2788 else
2789 gcc_unreachable ();
2791 return GS_ALL_DONE;
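/* A sketch of the overall effect (dump syntax approximate): a C
   switch such as
     switch (x) { case 1: foo (); break; default: break; }
   is gimplified to roughly
     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>: foo (); goto <D.3>;
     <D.3>:
   possibly wrapped in a GIMPLE_BIND ending on the break label.  */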
2794 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2796 static enum gimplify_status
2797 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2799 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2800 == current_function_decl);
2802 tree label = LABEL_EXPR_LABEL (*expr_p);
2803 glabel *label_stmt = gimple_build_label (label);
2804 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2805 gimplify_seq_add_stmt (pre_p, label_stmt);
2807 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2808 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2809 NOT_TAKEN));
2810 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2811 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2812 TAKEN));
2814 return GS_ALL_DONE;
2817 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2819 static enum gimplify_status
2820 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2822 struct gimplify_ctx *ctxp;
2823 glabel *label_stmt;
2825 /* Invalid programs can play Duff's Device type games with, for example,
2826 #pragma omp parallel. At least in the C front end, we don't
2827 detect such invalid branches until after gimplification, in the
2828 diagnose_omp_blocks pass. */
2829 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2830 if (ctxp->case_labels.exists ())
2831 break;
2833 tree label = CASE_LABEL (*expr_p);
2834 label_stmt = gimple_build_label (label);
2835 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2836 ctxp->case_labels.safe_push (*expr_p);
2837 gimplify_seq_add_stmt (pre_p, label_stmt);
2839 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2840 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2841 NOT_TAKEN));
2842 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2843 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2844 TAKEN));
2846 return GS_ALL_DONE;
2849 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2850 if necessary. */
2852 tree
2853 build_and_jump (tree *label_p)
2855 if (label_p == NULL)
2856 /* If there's nowhere to jump, just fall through. */
2857 return NULL_TREE;
2859 if (*label_p == NULL_TREE)
2861 tree label = create_artificial_label (UNKNOWN_LOCATION);
2862 *label_p = label;
2865 return build1 (GOTO_EXPR, void_type_node, *label_p);
2868 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2869 This also involves building a label to jump to and communicating it to
2870 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2872 static enum gimplify_status
2873 gimplify_exit_expr (tree *expr_p)
2875 tree cond = TREE_OPERAND (*expr_p, 0);
2876 tree expr;
2878 expr = build_and_jump (&gimplify_ctxp->exit_label);
2879 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2880 *expr_p = expr;
2882 return GS_OK;
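/* Illustratively, an EXIT_EXPR <cond> is rewritten to
     if (cond) goto exit_label;
   where exit_label is the label that gimplify_loop_expr emits
   after the loop body.  */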
2885 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2886 different from its canonical type, wrap the whole thing inside a
2887 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2888 type.
2890 The canonical type of a COMPONENT_REF is the type of the field being
2891 referenced--unless the field is a bit-field which can be read directly
2892 in a smaller mode, in which case the canonical type is the
2893 sign-appropriate type corresponding to that mode. */
2895 static void
2896 canonicalize_component_ref (tree *expr_p)
2898 tree expr = *expr_p;
2899 tree type;
2901 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2903 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2904 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2905 else
2906 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2908 /* One could argue that all the stuff below is not necessary for
2909 the non-bitfield case and declare it a FE error if type
2910 adjustment would be needed. */
2911 if (TREE_TYPE (expr) != type)
2913 #ifdef ENABLE_TYPES_CHECKING
2914 tree old_type = TREE_TYPE (expr);
2915 #endif
2916 int type_quals;
2918 /* We need to preserve qualifiers and propagate them from
2919 operand 0. */
2920 type_quals = TYPE_QUALS (type)
2921 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2922 if (TYPE_QUALS (type) != type_quals)
2923 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2925 /* Set the type of the COMPONENT_REF to the underlying type. */
2926 TREE_TYPE (expr) = type;
2928 #ifdef ENABLE_TYPES_CHECKING
2929 /* It is now a FE error if the conversion from the canonical
2930 type to the original expression type is not useless.  */
2931 gcc_assert (useless_type_conversion_p (old_type, type));
2932 #endif
2936 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2937 to foo, embed that change in the ADDR_EXPR by converting
2938 T array[U];
2939 (T *)&array
2941 &array[L]
2942 where L is the lower bound. For simplicity, only do this for constant
2943 lower bound.
2944 The constraint is that the type of &array[L] is trivially convertible
2945 to T *. */
2947 static void
2948 canonicalize_addr_expr (tree *expr_p)
2950 tree expr = *expr_p;
2951 tree addr_expr = TREE_OPERAND (expr, 0);
2952 tree datype, ddatype, pddatype;
2954 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2955 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2956 || TREE_CODE (addr_expr) != ADDR_EXPR)
2957 return;
2959 /* The addr_expr type should be a pointer to an array. */
2960 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2961 if (TREE_CODE (datype) != ARRAY_TYPE)
2962 return;
2964 /* The pointer to element type shall be trivially convertible to
2965 the expression pointer type. */
2966 ddatype = TREE_TYPE (datype);
2967 pddatype = build_pointer_type (ddatype);
2968 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2969 pddatype))
2970 return;
2972 /* The lower bound and element sizes must be constant. */
2973 if (!TYPE_SIZE_UNIT (ddatype)
2974 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2975 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2976 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2977 return;
2979 /* All checks succeeded. Build a new node to merge the cast. */
2980 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2982 NULL_TREE, NULL_TREE);
2983 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2985 /* We can have stripped a required restrict qualifier above. */
2986 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2987 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
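/* E.g. (illustrative):
     int a[10];
     int *p = (int *) &a;
   is canonicalized to
     int *p = &a[0];  */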
2990 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2991 underneath as appropriate. */
2993 static enum gimplify_status
2994 gimplify_conversion (tree *expr_p)
2996 location_t loc = EXPR_LOCATION (*expr_p);
2997 gcc_assert (CONVERT_EXPR_P (*expr_p));
2999 /* Then strip away all but the outermost conversion. */
3000 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3002 /* And remove the outermost conversion if it's useless. */
3003 if (tree_ssa_useless_type_conversion (*expr_p))
3004 *expr_p = TREE_OPERAND (*expr_p, 0);
3006 /* If we still have a conversion at the toplevel,
3007 then canonicalize some constructs. */
3008 if (CONVERT_EXPR_P (*expr_p))
3010 tree sub = TREE_OPERAND (*expr_p, 0);
3012 /* If a NOP conversion is changing the type of a COMPONENT_REF
3013 expression, then canonicalize its type now in order to expose more
3014 redundant conversions. */
3015 if (TREE_CODE (sub) == COMPONENT_REF)
3016 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3018 /* If a NOP conversion is changing a pointer to array of foo
3019 to a pointer to foo, embed that change in the ADDR_EXPR. */
3020 else if (TREE_CODE (sub) == ADDR_EXPR)
3021 canonicalize_addr_expr (expr_p);
3024 /* If we have a conversion to a non-register type force the
3025 use of a VIEW_CONVERT_EXPR instead. */
3026 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3027 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3028 TREE_OPERAND (*expr_p, 0));
3030 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3031 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3032 TREE_SET_CODE (*expr_p, NOP_EXPR);
3034 return GS_OK;
3037 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3038 DECL_VALUE_EXPR, and it's worth re-examining things. */
3040 static enum gimplify_status
3041 gimplify_var_or_parm_decl (tree *expr_p)
3043 tree decl = *expr_p;
3045 /* ??? If this is a local variable, and it has not been seen in any
3046 outer BIND_EXPR, then it's probably the result of a duplicate
3047 declaration, for which we've already issued an error. It would
3048 be really nice if the front end wouldn't leak these at all.
3049 Currently the only known culprit is C++ destructors, as seen
3050 in g++.old-deja/g++.jason/binding.C.
3051 Another possible culprit is size expressions for variably modified
3052 types which are lost in the FE or not gimplified correctly.  */
3053 if (VAR_P (decl)
3054 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3055 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3056 && decl_function_context (decl) == current_function_decl)
3058 gcc_assert (seen_error ());
3059 return GS_ERROR;
3062 /* When within an OMP context, notice uses of variables. */
3063 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3064 return GS_ALL_DONE;
3066 /* If the decl is an alias for another expression, substitute it now. */
3067 if (DECL_HAS_VALUE_EXPR_P (decl))
3069 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3070 return GS_OK;
3073 return GS_ALL_DONE;
3076 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3078 static void
3079 recalculate_side_effects (tree t)
3081 enum tree_code code = TREE_CODE (t);
3082 int len = TREE_OPERAND_LENGTH (t);
3083 int i;
3085 switch (TREE_CODE_CLASS (code))
3087 case tcc_expression:
3088 switch (code)
3090 case INIT_EXPR:
3091 case MODIFY_EXPR:
3092 case VA_ARG_EXPR:
3093 case PREDECREMENT_EXPR:
3094 case PREINCREMENT_EXPR:
3095 case POSTDECREMENT_EXPR:
3096 case POSTINCREMENT_EXPR:
3097 /* All of these have side-effects, no matter what their
3098 operands are. */
3099 return;
3101 default:
3102 break;
3104 /* Fall through. */
3106 case tcc_comparison: /* a comparison expression */
3107 case tcc_unary: /* a unary arithmetic expression */
3108 case tcc_binary: /* a binary arithmetic expression */
3109 case tcc_reference: /* a reference */
3110 case tcc_vl_exp: /* a function call */
3111 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3112 for (i = 0; i < len; ++i)
3114 tree op = TREE_OPERAND (t, i);
3115 if (op && TREE_SIDE_EFFECTS (op))
3116 TREE_SIDE_EFFECTS (t) = 1;
3118 break;
3120 case tcc_constant:
3121 /* No side-effects. */
3122 return;
3124 default:
3125 gcc_unreachable ();
3129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3130 node *EXPR_P.
3132 compound_lval
3133 : min_lval '[' val ']'
3134 | min_lval '.' ID
3135 | compound_lval '[' val ']'
3136 | compound_lval '.' ID
3138 This is not part of the original SIMPLE definition, which separates
3139 array and member references, but it seems reasonable to handle them
3140 together. Also, this way we don't run into problems with union
3141 aliasing; gcc requires that for accesses through a union to alias, the
3142 union reference must be explicit, which was not always the case when we
3143 were splitting up array and member refs.
3145 PRE_P points to the sequence where side effects that must happen before
3146 *EXPR_P should be stored.
3148 POST_P points to the sequence where side effects that must happen after
3149 *EXPR_P should be stored. */
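/* For example (illustrative), for a compound lvalue such as a[i].f
   the code below first deals with any variable bounds, sizes and
   positions, then gimplifies the base object, and finally the
   index, so that an expression like a[i++].f has the increment
   emitted to PRE_P.  */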
3151 static enum gimplify_status
3152 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3153 fallback_t fallback)
3155 tree *p;
3156 enum gimplify_status ret = GS_ALL_DONE, tret;
3157 int i;
3158 location_t loc = EXPR_LOCATION (*expr_p);
3159 tree expr = *expr_p;
3161 /* Create a stack of the subexpressions so later we can walk them in
3162 order from inner to outer. */
3163 auto_vec<tree, 10> expr_stack;
3165 /* We can handle anything that get_inner_reference can deal with. */
3166 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3168 restart:
3169 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3170 if (TREE_CODE (*p) == INDIRECT_REF)
3171 *p = fold_indirect_ref_loc (loc, *p);
3173 if (handled_component_p (*p))
3175 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3176 additional COMPONENT_REFs. */
3177 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3178 && gimplify_var_or_parm_decl (p) == GS_OK)
3179 goto restart;
3180 else
3181 break;
3183 expr_stack.safe_push (*p);
3186 gcc_assert (expr_stack.length ());
3188 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3189 walked through and P points to the innermost expression.
3191 Java requires that we elaborate nodes in source order.  That
3192 means we must gimplify the inner expression followed by each of
3193 the indices, in order. But we can't gimplify the inner
3194 expression until we deal with any variable bounds, sizes, or
3195 positions in order to deal with PLACEHOLDER_EXPRs.
3197 The base expression may contain a statement expression that
3198 has declarations used in size expressions, so has to be
3199 gimplified before gimplifying the size expressions.
3201 So we do this in three steps. First we deal with variable
3202 bounds, sizes, and positions, then we gimplify the base and
3203 ensure it is memory if needed, then we deal with the annotations
3204 for any variables in the components and any indices, from left
3205 to right. */
3207 bool need_non_reg = false;
3208 for (i = expr_stack.length () - 1; i >= 0; i--)
3210 tree t = expr_stack[i];
3212 if (error_operand_p (TREE_OPERAND (t, 0)))
3213 return GS_ERROR;
3215 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3217 /* Deal with the low bound and element type size and put them into
3218 the ARRAY_REF. If these values are set, they have already been
3219 gimplified. */
3220 if (TREE_OPERAND (t, 2) == NULL_TREE)
3222 tree low = unshare_expr (array_ref_low_bound (t));
3223 if (!is_gimple_min_invariant (low))
3225 TREE_OPERAND (t, 2) = low;
3229 if (TREE_OPERAND (t, 3) == NULL_TREE)
3231 tree elmt_size = array_ref_element_size (t);
3232 if (!is_gimple_min_invariant (elmt_size))
3234 elmt_size = unshare_expr (elmt_size);
3235 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3236 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3238 /* Divide the element size by the alignment of the element
3239 type (above). */
3240 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3241 elmt_size, factor);
3243 TREE_OPERAND (t, 3) = elmt_size;
3246 need_non_reg = true;
3248 else if (TREE_CODE (t) == COMPONENT_REF)
3250 /* Set the field offset into T and gimplify it. */
3251 if (TREE_OPERAND (t, 2) == NULL_TREE)
3253 tree offset = component_ref_field_offset (t);
3254 if (!is_gimple_min_invariant (offset))
3256 offset = unshare_expr (offset);
3257 tree field = TREE_OPERAND (t, 1);
3258 tree factor
3259 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3261 /* Divide the offset by its alignment. */
3262 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3263 offset, factor);
3265 TREE_OPERAND (t, 2) = offset;
3268 need_non_reg = true;
3270 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3271 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR
3272 is a non-register type then require the base object to be a
3273 non-register as well. */
3274 need_non_reg = true;
3277 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3278 so as to match the min_lval predicate. Failure to do so may result
3279 in the creation of large aggregate temporaries. */
3280 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3281 fallback | fb_lvalue);
3282 ret = MIN (ret, tret);
3283 if (ret == GS_ERROR)
3284 return GS_ERROR;
3286 /* Step 2a: if we have component references we do not support on
3287 registers then make sure the base isn't a register. Of course
3288 we can only do so if an rvalue is OK. */
3289 if (need_non_reg && (fallback & fb_rvalue))
3290 prepare_gimple_addressable (p, pre_p);
3293 /* Step 3: gimplify size expressions and the indices and operands of
3294 ARRAY_REF. During this loop we also remove any useless conversions.
3295 If we operate on a register also make sure to properly gimplify
3296 to individual operations. */
3298 bool reg_operations = is_gimple_reg (*p);
3299 for (; expr_stack.length () > 0; )
3301 tree t = expr_stack.pop ();
3303 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3305 gcc_assert (!reg_operations);
3307 /* Gimplify the low bound and element type size. */
3308 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3309 is_gimple_reg, fb_rvalue);
3310 ret = MIN (ret, tret);
3312 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3313 is_gimple_reg, fb_rvalue);
3314 ret = MIN (ret, tret);
3316 /* Gimplify the dimension. */
3317 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3318 is_gimple_val, fb_rvalue);
3319 ret = MIN (ret, tret);
3321 else if (TREE_CODE (t) == COMPONENT_REF)
3323 gcc_assert (!reg_operations);
3325 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3326 is_gimple_reg, fb_rvalue);
3327 ret = MIN (ret, tret);
3329 else if (reg_operations)
3331 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3332 is_gimple_val, fb_rvalue);
3333 ret = MIN (ret, tret);
3336 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3338 /* The innermost expression P may have originally had
3339 TREE_SIDE_EFFECTS set which would have caused all the outer
3340 expressions in *EXPR_P leading to P to also have had
3341 TREE_SIDE_EFFECTS set. */
3342 recalculate_side_effects (t);
3345 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3346 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3348 canonicalize_component_ref (expr_p);
3351 expr_stack.release ();
3353 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3355 return ret;
3358 /* Gimplify the self modifying expression pointed to by EXPR_P
3359 (++, --, +=, -=).
3361 PRE_P points to the list where side effects that must happen before
3362 *EXPR_P should be stored.
3364 POST_P points to the list where side effects that must happen after
3365 *EXPR_P should be stored.
3367 WANT_VALUE is nonzero iff we want to use the value of this expression
3368 in another expression.
3370 ARITH_TYPE is the type the computation should be performed in. */
3372 enum gimplify_status
3373 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3374 bool want_value, tree arith_type)
3376 enum tree_code code;
3377 tree lhs, lvalue, rhs, t1;
3378 gimple_seq post = NULL, *orig_post_p = post_p;
3379 bool postfix;
3380 enum tree_code arith_code;
3381 enum gimplify_status ret;
3382 location_t loc = EXPR_LOCATION (*expr_p);
3384 code = TREE_CODE (*expr_p);
3386 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3387 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3389 /* Prefix or postfix? */
3390 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3391 /* Faster to treat as prefix if result is not used. */
3392 postfix = want_value;
3393 else
3394 postfix = false;
3396 /* For postfix, make sure the inner expression's post side effects
3397 are executed after side effects from this expression. */
3398 if (postfix)
3399 post_p = &post;
3401 /* Add or subtract? */
3402 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3403 arith_code = PLUS_EXPR;
3404 else
3405 arith_code = MINUS_EXPR;
3407 /* Gimplify the LHS into a GIMPLE lvalue. */
3408 lvalue = TREE_OPERAND (*expr_p, 0);
3409 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3410 if (ret == GS_ERROR)
3411 return ret;
3413 /* Extract the operands to the arithmetic operation. */
3414 lhs = lvalue;
3415 rhs = TREE_OPERAND (*expr_p, 1);
3417 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3418 that as the result value and in the postqueue operation.  */
3419 if (postfix)
3421 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3422 if (ret == GS_ERROR)
3423 return ret;
3425 lhs = get_initialized_tmp_var (lhs, pre_p);
3428 /* For pointer increment, use POINTER_PLUS_EXPR.  */
3429 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3431 rhs = convert_to_ptrofftype_loc (loc, rhs);
3432 if (arith_code == MINUS_EXPR)
3433 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3434 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3436 else
3437 t1 = fold_convert (TREE_TYPE (*expr_p),
3438 fold_build2 (arith_code, arith_type,
3439 fold_convert (arith_type, lhs),
3440 fold_convert (arith_type, rhs)));
3442 if (postfix)
3444 gimplify_assign (lvalue, t1, pre_p);
3445 gimplify_seq_add_seq (orig_post_p, post);
3446 *expr_p = lhs;
3447 return GS_ALL_DONE;
3449 else
3451 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3452 return GS_OK;
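/* Illustratively, a postfix x++ whose value is used becomes roughly
     t = x;
     x = t + 1;
   with t as the result, while prefix ++x (or an unused x++) becomes
     x = x + 1;  */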
3456 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3458 static void
3459 maybe_with_size_expr (tree *expr_p)
3461 tree expr = *expr_p;
3462 tree type = TREE_TYPE (expr);
3463 tree size;
3465 /* If we've already wrapped this or the type is error_mark_node, we can't do
3466 anything. */
3467 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3468 || type == error_mark_node)
3469 return;
3471 /* If the size isn't known or is a constant, we have nothing to do. */
3472 size = TYPE_SIZE_UNIT (type);
3473 if (!size || poly_int_tree_p (size))
3474 return;
3476 /* Otherwise, make a WITH_SIZE_EXPR. */
3477 size = unshare_expr (size);
3478 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3479 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
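/* Illustratively, if EXPR has a variably modified type whose size
   SZ is not constant, the expression is wrapped as
     WITH_SIZE_EXPR <EXPR, SZ>
   so later passes can still find the object's size after
   gimplification.  */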
3482 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
3483 Store any side-effects in PRE_P.  CALL_LOCATION is the location of
3484 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3485 gimplified to an SSA name. */
3487 enum gimplify_status
3488 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3489 bool allow_ssa)
3491 bool (*test) (tree);
3492 fallback_t fb;
3494 /* In general, we allow lvalues for function arguments to avoid
3495 extra overhead of copying large aggregates out of even larger
3496 aggregates into temporaries only to copy the temporaries to
3497 the argument list. Make optimizers happy by pulling out to
3498 temporaries those types that fit in registers. */
3499 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3500 test = is_gimple_val, fb = fb_rvalue;
3501 else
3503 test = is_gimple_lvalue, fb = fb_either;
3504 /* Also strip a TARGET_EXPR that would force an extra copy. */
3505 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3507 tree init = TARGET_EXPR_INITIAL (*arg_p);
3508 if (init
3509 && !VOID_TYPE_P (TREE_TYPE (init)))
3510 *arg_p = init;
3514 /* If this is a variable sized type, we must remember the size. */
3515 maybe_with_size_expr (arg_p);
3517 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3518 /* Make sure arguments have the same location as the function call
3519 itself. */
3520 protected_set_expr_location (*arg_p, call_location);
3522 /* There is a sequence point before a function call. Side effects in
3523 the argument list must occur before the actual call. So, when
3524 gimplifying arguments, force gimplify_expr to use an internal
3525 post queue which is then appended to the end of PRE_P. */
3526 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3529 /* Don't fold inside offloading or taskreg regions: it can break code by
3530 adding decl references that weren't in the source. We'll do it during
3531 omplower pass instead. */
3533 static bool
3534 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3536 struct gimplify_omp_ctx *ctx;
3537 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3538 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3539 return false;
3540 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3541 return false;
3542 /* Delay folding of builtins until the IL is in a consistent state
3543 so the diagnostic machinery can do a better job.  */
3544 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3545 return false;
3546 return fold_stmt (gsi);
3549 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3550 WANT_VALUE is true if the result of the call is desired. */
3552 static enum gimplify_status
3553 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3555 tree fndecl, parms, p, fnptrtype;
3556 enum gimplify_status ret;
3557 int i, nargs;
3558 gcall *call;
3559 bool builtin_va_start_p = false;
3560 location_t loc = EXPR_LOCATION (*expr_p);
3562 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3564 /* For reliable diagnostics during inlining, it is necessary that
3565 every call_expr be annotated with file and line. */
3566 if (! EXPR_HAS_LOCATION (*expr_p))
3567 SET_EXPR_LOCATION (*expr_p, input_location);
3569 /* Gimplify internal functions created in the FEs. */
3570 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3572 if (want_value)
3573 return GS_ALL_DONE;
3575 nargs = call_expr_nargs (*expr_p);
3576 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3577 auto_vec<tree> vargs (nargs);
3579 if (ifn == IFN_ASSUME)
3581 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3583 /* If the [[assume (cond)]]; condition is simple
3584 enough and can be evaluated unconditionally
3585 without side-effects, expand it as
3586 if (!cond) __builtin_unreachable (); */
3587 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3588 *expr_p = build3 (COND_EXPR, void_type_node,
3589 CALL_EXPR_ARG (*expr_p, 0), void_node,
3590 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3591 fndecl, 0));
3592 return GS_OK;
3594 /* If not optimizing, ignore the assumptions. */
3595 if (!optimize || seen_error ())
3597 *expr_p = NULL_TREE;
3598 return GS_ALL_DONE;
3600 /* Temporarily, until gimple lowering, transform
3601 .ASSUME (cond);
3602 into:
3603 [[assume (guard)]]
3605 guard = cond;
3607 such that gimple lowering can outline the condition into
3608 a separate function easily. */
3609 tree guard = create_tmp_var (boolean_type_node);
3610 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3611 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3612 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3613 push_gimplify_context ();
3614 gimple_seq body = NULL;
3615 gimple *g = gimplify_and_return_first (*expr_p, &body);
3616 pop_gimplify_context (g);
3617 g = gimple_build_assume (guard, body);
3618 gimple_set_location (g, loc);
3619 gimplify_seq_add_stmt (pre_p, g);
3620 *expr_p = NULL_TREE;
3621 return GS_ALL_DONE;
3624 for (i = 0; i < nargs; i++)
3626 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3627 EXPR_LOCATION (*expr_p));
3628 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3631 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3632 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3633 gimplify_seq_add_stmt (pre_p, call);
3634 return GS_ALL_DONE;
3637 /* This may be a call to a builtin function.
3639 Builtin function calls may be transformed into different
3640 (and more efficient) builtin function calls under certain
3641 circumstances. Unfortunately, gimplification can muck things
3642 up enough that the builtin expanders are not aware that certain
3643 transformations are still valid.
3645 So we attempt transformation/gimplification of the call before
3646 we gimplify the CALL_EXPR. At this time we do not manage to
3647 transform all calls in the same manner as the expanders do, but
3648 we do transform most of them. */
3649 fndecl = get_callee_fndecl (*expr_p);
3650 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3651 switch (DECL_FUNCTION_CODE (fndecl))
3653 CASE_BUILT_IN_ALLOCA:
3654 /* If the call has been built for a variable-sized object, then we
3655 want to restore the stack level when the enclosing BIND_EXPR is
3656 exited to reclaim the allocated space; otherwise, we precisely
3657 need to do the opposite and preserve the latest stack level. */
3658 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3659 gimplify_ctxp->save_stack = true;
3660 else
3661 gimplify_ctxp->keep_stack = true;
3662 break;
3664 case BUILT_IN_VA_START:
3666 builtin_va_start_p = TRUE;
3667 if (call_expr_nargs (*expr_p) < 2)
3669 error ("too few arguments to function %<va_start%>");
3670 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3671 return GS_OK;
3674 if (fold_builtin_next_arg (*expr_p, true))
3676 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3677 return GS_OK;
3679 break;
3682 case BUILT_IN_EH_RETURN:
3683 cfun->calls_eh_return = true;
3684 break;
3686 case BUILT_IN_CLEAR_PADDING:
3687 if (call_expr_nargs (*expr_p) == 1)
3689 /* Remember the original type of the argument in an internal
3690 dummy second argument, since in GIMPLE pointer conversions are
3691 useless.  Also mark this call as not for automatic
3692 initialization in the internal dummy third argument.  */
3693 p = CALL_EXPR_ARG (*expr_p, 0);
3694 *expr_p
3695 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3696 build_zero_cst (TREE_TYPE (p)));
3697 return GS_OK;
3699 break;
3701 default:
3704 if (fndecl && fndecl_built_in_p (fndecl))
3706 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3707 if (new_tree && new_tree != *expr_p)
3709 /* There was a transformation of this call which computes the
3710 same value, but in a more efficient way. Return and try
3711 again. */
3712 *expr_p = new_tree;
3713 return GS_OK;
3717 /* Remember the original function pointer type. */
3718 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3720 if (flag_openmp
3721 && fndecl
3722 && cfun
3723 && (cfun->curr_properties & PROP_gimple_any) == 0)
3725 tree variant = omp_resolve_declare_variant (fndecl);
3726 if (variant != fndecl)
3727 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3730 /* There is a sequence point before the call, so any side effects in
3731 the calling expression must occur before the actual call. Force
3732 gimplify_expr to use an internal post queue. */
3733 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3734 is_gimple_call_addr, fb_rvalue);
3736 if (ret == GS_ERROR)
3737 return GS_ERROR;
3739 nargs = call_expr_nargs (*expr_p);
3741 /* Get argument types for verification. */
3742 fndecl = get_callee_fndecl (*expr_p);
3743 parms = NULL_TREE;
3744 if (fndecl)
3745 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3746 else
3747 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3749 if (fndecl && DECL_ARGUMENTS (fndecl))
3750 p = DECL_ARGUMENTS (fndecl);
3751 else if (parms)
3752 p = parms;
3753 else
3754 p = NULL_TREE;
3755 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3758 /* If the last argument is __builtin_va_arg_pack () and it is not
3759 passed as a named argument, decrease the number of CALL_EXPR
3760 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3761 if (!p
3762 && i < nargs
3763 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3765 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3766 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3768 if (last_arg_fndecl
3769 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3771 tree call = *expr_p;
3773 --nargs;
3774 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3775 CALL_EXPR_FN (call),
3776 nargs, CALL_EXPR_ARGP (call));
3778 /* Copy all CALL_EXPR flags, location and block, except
3779 CALL_EXPR_VA_ARG_PACK flag. */
3780 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3781 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3782 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3783 = CALL_EXPR_RETURN_SLOT_OPT (call);
3784 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3785 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3787 /* Set CALL_EXPR_VA_ARG_PACK. */
3788 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
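/* A call like this typically originates from the documented
   always_inline wrapper idiom, along the lines of

     extern inline __attribute__ ((always_inline, gnu_inline)) int
     my_printf (const char *fmt, ...)
     {
       return printf (fmt, __builtin_va_arg_pack ());
     }

   where the trailing __builtin_va_arg_pack () argument is removed
   here and CALL_EXPR_VA_ARG_PACK is set on the call instead.  */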
3792 /* If the call returns twice then after building the CFG the call
3793 argument computations will no longer dominate the call because
3794 we add an abnormal incoming edge to the call. So do not use SSA
3795 vars there. */
3796 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3798 /* Gimplify the function arguments. */
3799 if (nargs > 0)
3801 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3802 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3803 PUSH_ARGS_REVERSED ? i-- : i++)
3805 enum gimplify_status t;
3807 /* Avoid gimplifying the second argument to va_start, which needs to
3808 be the plain PARM_DECL. */
3809 if ((i != 1) || !builtin_va_start_p)
3811 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3812 EXPR_LOCATION (*expr_p), ! returns_twice);
3814 if (t == GS_ERROR)
3815 ret = GS_ERROR;
3820 /* Gimplify the static chain. */
3821 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3823 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3824 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3825 else
3827 enum gimplify_status t;
3828 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3829 EXPR_LOCATION (*expr_p), ! returns_twice);
3830 if (t == GS_ERROR)
3831 ret = GS_ERROR;
3835 /* Verify the function result. */
3836 if (want_value && fndecl
3837 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3839 error_at (loc, "using result of function returning %<void%>");
3840 ret = GS_ERROR;
3843 /* Try this again in case gimplification exposed something. */
3844 if (ret != GS_ERROR)
3846 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3848 if (new_tree && new_tree != *expr_p)
3850 /* There was a transformation of this call which computes the
3851 same value, but in a more efficient way. Return and try
3852 again. */
3853 *expr_p = new_tree;
3854 return GS_OK;
3857 else
3859 *expr_p = error_mark_node;
3860 return GS_ERROR;
3863 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3864 decl. This allows us to eliminate redundant or useless
3865 calls to "const" functions. */
3866 if (TREE_CODE (*expr_p) == CALL_EXPR)
3868 int flags = call_expr_flags (*expr_p);
3869 if (flags & (ECF_CONST | ECF_PURE)
3870 /* An infinite loop is considered a side effect. */
3871 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3872 TREE_SIDE_EFFECTS (*expr_p) = 0;
3875 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3876 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3877 form and delegate the creation of a GIMPLE_CALL to
3878 gimplify_modify_expr. This is always possible because when
3879 WANT_VALUE is true, the caller wants the result of this call into
3880 a temporary, which means that we will emit an INIT_EXPR in
3881 internal_get_tmp_var which will then be handled by
3882 gimplify_modify_expr. */
3883 if (!want_value)
3885 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3886 have to do is replicate it as a GIMPLE_CALL tuple. */
3887 gimple_stmt_iterator gsi;
3888 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3889 notice_special_calls (call);
3890 gimplify_seq_add_stmt (pre_p, call);
3891 gsi = gsi_last (*pre_p);
3892 maybe_fold_stmt (&gsi);
3893 *expr_p = NULL_TREE;
3895 else
3896 /* Remember the original function type. */
3897 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3898 CALL_EXPR_FN (*expr_p));
3900 return ret;
3903 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3904 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3906 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3907 condition is true or false, respectively. If null, we should generate
3908 our own to skip over the evaluation of this specific expression.
3910 LOCUS is the source location of the COND_EXPR.
3912 This function is the tree equivalent of do_jump.
3914 shortcut_cond_r should only be called by shortcut_cond_expr. */
3916 static tree
3917 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3918 location_t locus)
3920 tree local_label = NULL_TREE;
3921 tree t, expr = NULL;
3923 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3924 retain the shortcut semantics. Just insert the gotos here;
3925 shortcut_cond_expr will append the real blocks later. */
3926 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3928 location_t new_locus;
3930 /* Turn if (a && b) into
3932 if (a); else goto no;
3933 if (b) goto yes; else goto no;
3934 (no:) */
3936 if (false_label_p == NULL)
3937 false_label_p = &local_label;
3939 /* Keep the original source location on the first 'if'. */
3940 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3941 append_to_statement_list (t, &expr);
3943 /* Set the source location of the && on the second 'if'. */
3944 new_locus = rexpr_location (pred, locus);
3945 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3946 new_locus);
3947 append_to_statement_list (t, &expr);
3949 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3951 location_t new_locus;
3953 /* Turn if (a || b) into
3955 if (a) goto yes;
3956 if (b) goto yes; else goto no;
3957 (yes:) */
3959 if (true_label_p == NULL)
3960 true_label_p = &local_label;
3962 /* Keep the original source location on the first 'if'. */
3963 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3964 append_to_statement_list (t, &expr);
3966 /* Set the source location of the || on the second 'if'. */
3967 new_locus = rexpr_location (pred, locus);
3968 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3969 new_locus);
3970 append_to_statement_list (t, &expr);
3972 else if (TREE_CODE (pred) == COND_EXPR
3973 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3974 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3976 location_t new_locus;
3978 /* As long as we're messing with gotos, turn if (a ? b : c) into
3979 if (a)
3980 if (b) goto yes; else goto no;
3981 else
3982 if (c) goto yes; else goto no;
3984 Don't do this if one of the arms has void type, which can happen
3985 in C++ when the arm is a throw expression. */
3987 /* Keep the original source location on the first 'if'. Set the source
3988 location of the ? on the second 'if'. */
3989 new_locus = rexpr_location (pred, locus);
3990 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3991 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3992 false_label_p, locus),
3993 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3994 false_label_p, new_locus));
3996 else
3998 expr = build3 (COND_EXPR, void_type_node, pred,
3999 build_and_jump (true_label_p),
4000 build_and_jump (false_label_p));
4001 SET_EXPR_LOCATION (expr, locus);
4004 if (local_label)
4006 t = build1 (LABEL_EXPR, void_type_node, local_label);
4007 append_to_statement_list (t, &expr);
4010 return expr;
4013 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4014 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4015 statement, if it is the last one. Otherwise, return NULL. */
4017 static tree
4018 find_goto (tree expr)
4020 if (!expr)
4021 return NULL_TREE;
4023 if (TREE_CODE (expr) == GOTO_EXPR)
4024 return expr;
4026 if (TREE_CODE (expr) != STATEMENT_LIST)
4027 return NULL_TREE;
4029 tree_stmt_iterator i = tsi_start (expr);
4031 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4032 tsi_next (&i);
4034 if (!tsi_one_before_end_p (i))
4035 return NULL_TREE;
4037 return find_goto (tsi_stmt (i));
4040 /* Same as find_goto, except that it returns NULL if the destination
4041 is not a LABEL_DECL. */
4043 static inline tree
4044 find_goto_label (tree expr)
4046 tree dest = find_goto (expr);
4047 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4048 return dest;
4049 return NULL_TREE;
4052 /* Given a conditional expression EXPR with short-circuit boolean
4053 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4054 predicate apart into the equivalent sequence of conditionals. */
4056 static tree
4057 shortcut_cond_expr (tree expr)
4059 tree pred = TREE_OPERAND (expr, 0);
4060 tree then_ = TREE_OPERAND (expr, 1);
4061 tree else_ = TREE_OPERAND (expr, 2);
4062 tree true_label, false_label, end_label, t;
4063 tree *true_label_p;
4064 tree *false_label_p;
4065 bool emit_end, emit_false, jump_over_else;
4066 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4067 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4069 /* First do simple transformations. */
4070 if (!else_se)
4072 /* If there is no 'else', turn
4073 if (a && b) then c
4074 into
4075 if (a) if (b) then c. */
4076 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4078 /* Keep the original source location on the first 'if'. */
4079 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4080 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4081 /* Set the source location of the && on the second 'if'. */
4082 if (rexpr_has_location (pred))
4083 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4084 then_ = shortcut_cond_expr (expr);
4085 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4086 pred = TREE_OPERAND (pred, 0);
4087 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4088 SET_EXPR_LOCATION (expr, locus);
4092 if (!then_se)
4094 /* If there is no 'then', turn
4095 if (a || b); else d
4096 into
4097 if (a); else if (b); else d. */
4098 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4100 /* Keep the original source location on the first 'if'. */
4101 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4102 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4103 /* Set the source location of the || on the second 'if'. */
4104 if (rexpr_has_location (pred))
4105 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4106 else_ = shortcut_cond_expr (expr);
4107 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4108 pred = TREE_OPERAND (pred, 0);
4109 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4110 SET_EXPR_LOCATION (expr, locus);
4114 /* If we're done, great. */
4115 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4116 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4117 return expr;
4119 /* Otherwise we need to mess with gotos. Change
4120 if (a) c; else d;
4122 if (a); else goto no;
4123 c; goto end;
4124 no: d; end:
4125 and recursively gimplify the condition. */
4127 true_label = false_label = end_label = NULL_TREE;
4129 /* If our arms just jump somewhere, hijack those labels so we don't
4130 generate jumps to jumps. */
4132 if (tree then_goto = find_goto_label (then_))
4134 true_label = GOTO_DESTINATION (then_goto);
4135 then_ = NULL;
4136 then_se = false;
4139 if (tree else_goto = find_goto_label (else_))
4141 false_label = GOTO_DESTINATION (else_goto);
4142 else_ = NULL;
4143 else_se = false;
4146 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4147 if (true_label)
4148 true_label_p = &true_label;
4149 else
4150 true_label_p = NULL;
4152 /* The 'else' branch also needs a label if it contains interesting code. */
4153 if (false_label || else_se)
4154 false_label_p = &false_label;
4155 else
4156 false_label_p = NULL;
4158 /* If there was nothing else in our arms, just forward the label(s). */
4159 if (!then_se && !else_se)
4160 return shortcut_cond_r (pred, true_label_p, false_label_p,
4161 EXPR_LOC_OR_LOC (expr, input_location));
4163 /* If our last subexpression already has a terminal label, reuse it. */
4164 if (else_se)
4165 t = expr_last (else_);
4166 else if (then_se)
4167 t = expr_last (then_);
4168 else
4169 t = NULL;
4170 if (t && TREE_CODE (t) == LABEL_EXPR)
4171 end_label = LABEL_EXPR_LABEL (t);
4173 /* If we don't care about jumping to the 'else' branch, jump to the end
4174 if the condition is false. */
4175 if (!false_label_p)
4176 false_label_p = &end_label;
4178 /* We only want to emit these labels if we aren't hijacking them. */
4179 emit_end = (end_label == NULL_TREE);
4180 emit_false = (false_label == NULL_TREE);
4182 /* We only emit the jump over the else clause if we have to--if the
4183 then clause may fall through. Otherwise we can wind up with a
4184 useless jump and a useless label at the end of gimplified code,
4185 which will cause us to think that this conditional as a whole
4186 falls through even if it doesn't. If we then inline a function
4187 which ends with such a condition, that can cause us to issue an
4188 inappropriate warning about control reaching the end of a
4189 non-void function. */
4190 jump_over_else = block_may_fallthru (then_);
4192 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4193 EXPR_LOC_OR_LOC (expr, input_location));
4195 expr = NULL;
4196 append_to_statement_list (pred, &expr);
4198 append_to_statement_list (then_, &expr);
4199 if (else_se)
4201 if (jump_over_else)
4203 tree last = expr_last (expr);
4204 t = build_and_jump (&end_label);
4205 if (rexpr_has_location (last))
4206 SET_EXPR_LOCATION (t, rexpr_location (last));
4207 append_to_statement_list (t, &expr);
4209 if (emit_false)
4211 t = build1 (LABEL_EXPR, void_type_node, false_label);
4212 append_to_statement_list (t, &expr);
4214 append_to_statement_list (else_, &expr);
4216 if (emit_end && end_label)
4218 t = build1 (LABEL_EXPR, void_type_node, end_label);
4219 append_to_statement_list (t, &expr);
4222 return expr;
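/* As a rough sketch, for

     if (a && b) c; else d;

   the rewriting above produces the goto form

     if (a); else goto no;
     if (b); else goto no;
     c;
     goto end;
     no: d;
     end:;

   with the labels created, hijacked or omitted as decided above.  */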
4225 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4227 tree
4228 gimple_boolify (tree expr)
4230 tree type = TREE_TYPE (expr);
4231 location_t loc = EXPR_LOCATION (expr);
4233 if (TREE_CODE (expr) == NE_EXPR
4234 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4235 && integer_zerop (TREE_OPERAND (expr, 1)))
4237 tree call = TREE_OPERAND (expr, 0);
4238 tree fn = get_callee_fndecl (call);
4240 /* For __builtin_expect ((long) (x), y) recurse into x as well
4241 if x is truth_value_p. */
4242 if (fn
4243 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4244 && call_expr_nargs (call) == 2)
4246 tree arg = CALL_EXPR_ARG (call, 0);
4247 if (arg)
4249 if (TREE_CODE (arg) == NOP_EXPR
4250 && TREE_TYPE (arg) == TREE_TYPE (call))
4251 arg = TREE_OPERAND (arg, 0);
4252 if (truth_value_p (TREE_CODE (arg)))
4254 arg = gimple_boolify (arg);
4255 CALL_EXPR_ARG (call, 0)
4256 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4262 switch (TREE_CODE (expr))
4264 case TRUTH_AND_EXPR:
4265 case TRUTH_OR_EXPR:
4266 case TRUTH_XOR_EXPR:
4267 case TRUTH_ANDIF_EXPR:
4268 case TRUTH_ORIF_EXPR:
4269 /* Also boolify the arguments of truth exprs. */
4270 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4271 /* FALLTHRU */
4273 case TRUTH_NOT_EXPR:
4274 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4276 /* These expressions always produce boolean results. */
4277 if (TREE_CODE (type) != BOOLEAN_TYPE)
4278 TREE_TYPE (expr) = boolean_type_node;
4279 return expr;
4281 case ANNOTATE_EXPR:
4282 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4284 case annot_expr_ivdep_kind:
4285 case annot_expr_unroll_kind:
4286 case annot_expr_no_vector_kind:
4287 case annot_expr_vector_kind:
4288 case annot_expr_parallel_kind:
4289 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4290 if (TREE_CODE (type) != BOOLEAN_TYPE)
4291 TREE_TYPE (expr) = boolean_type_node;
4292 return expr;
4293 default:
4294 gcc_unreachable ();
4297 default:
4298 if (COMPARISON_CLASS_P (expr))
4300 /* These expressions always produce boolean results. */
4301 if (TREE_CODE (type) != BOOLEAN_TYPE)
4302 TREE_TYPE (expr) = boolean_type_node;
4303 return expr;
4305 /* Other expressions that get here must have boolean values, but
4306 might need to be converted to the appropriate mode. */
4307 if (TREE_CODE (type) == BOOLEAN_TYPE)
4308 return expr;
4309 return fold_convert_loc (loc, boolean_type_node, expr);
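/* For example, a comparison such as a < b keeps its shape but has its
   type forced to boolean_type_node, while an expression of another
   type reaching the default case, say an int used as a flag, is
   wrapped in a conversion to boolean_type_node instead.  */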
4313 /* Given a conditional expression *EXPR_P without side effects, gimplify
4314 its operands. New statements are inserted to PRE_P. */
4316 static enum gimplify_status
4317 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4319 tree expr = *expr_p, cond;
4320 enum gimplify_status ret, tret;
4321 enum tree_code code;
4323 cond = gimple_boolify (COND_EXPR_COND (expr));
4325 /* We need to handle && and || specially, as their gimplification
4326 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4327 code = TREE_CODE (cond);
4328 if (code == TRUTH_ANDIF_EXPR)
4329 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4330 else if (code == TRUTH_ORIF_EXPR)
4331 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4332 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4333 COND_EXPR_COND (*expr_p) = cond;
4335 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4336 is_gimple_val, fb_rvalue);
4337 ret = MIN (ret, tret);
4338 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4339 is_gimple_val, fb_rvalue);
4341 return MIN (ret, tret);
4344 /* Return true if evaluating EXPR could trap.
4345 EXPR is GENERIC, while tree_could_trap_p can be called
4346 only on GIMPLE. */
4348 bool
4349 generic_expr_could_trap_p (tree expr)
4351 unsigned i, n;
4353 if (!expr || is_gimple_val (expr))
4354 return false;
4356 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4357 return true;
4359 n = TREE_OPERAND_LENGTH (expr);
4360 for (i = 0; i < n; i++)
4361 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4362 return true;
4364 return false;
4367 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4368 into
4370 if (p)                  if (p)
4371   t1 = a;                 a;
4372 else             or     else
4373   t1 = b;                 b;
4376 The second form is used when *EXPR_P is of type void.
4378 PRE_P points to the list where side effects that must happen before
4379 *EXPR_P should be stored. */
4381 static enum gimplify_status
4382 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4384 tree expr = *expr_p;
4385 tree type = TREE_TYPE (expr);
4386 location_t loc = EXPR_LOCATION (expr);
4387 tree tmp, arm1, arm2;
4388 enum gimplify_status ret;
4389 tree label_true, label_false, label_cont;
4390 bool have_then_clause_p, have_else_clause_p;
4391 gcond *cond_stmt;
4392 enum tree_code pred_code;
4393 gimple_seq seq = NULL;
4395 /* If this COND_EXPR has a value, copy the values into a temporary within
4396 the arms. */
4397 if (!VOID_TYPE_P (type))
4399 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4400 tree result;
4402 /* If either an rvalue is ok or we do not require an lvalue, create the
4403 temporary. But we cannot do that if the type is addressable. */
4404 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4405 && !TREE_ADDRESSABLE (type))
4407 if (gimplify_ctxp->allow_rhs_cond_expr
4408 /* If either branch has side effects or could trap, it can't be
4409 evaluated unconditionally. */
4410 && !TREE_SIDE_EFFECTS (then_)
4411 && !generic_expr_could_trap_p (then_)
4412 && !TREE_SIDE_EFFECTS (else_)
4413 && !generic_expr_could_trap_p (else_))
4414 return gimplify_pure_cond_expr (expr_p, pre_p);
4416 tmp = create_tmp_var (type, "iftmp");
4417 result = tmp;
4420 /* Otherwise, only create and copy references to the values. */
4421 else
4423 type = build_pointer_type (type);
4425 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4426 then_ = build_fold_addr_expr_loc (loc, then_);
4428 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4429 else_ = build_fold_addr_expr_loc (loc, else_);
4431 expr
4432 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4434 tmp = create_tmp_var (type, "iftmp");
4435 result = build_simple_mem_ref_loc (loc, tmp);
4438 /* Build the new then clause, `tmp = then_;'. But don't build the
4439 assignment if the value is void; in C++ it can be if it's a throw. */
4440 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4441 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4443 /* Similarly, build the new else clause, `tmp = else_;'. */
4444 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4445 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4447 TREE_TYPE (expr) = void_type_node;
4448 recalculate_side_effects (expr);
4450 /* Move the COND_EXPR to the prequeue. */
4451 gimplify_stmt (&expr, pre_p);
4453 *expr_p = result;
4454 return GS_ALL_DONE;
4457 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4458 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4459 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4460 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4462 /* Make sure the condition has BOOLEAN_TYPE. */
4463 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4465 /* Break apart && and || conditions. */
4466 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4467 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4469 expr = shortcut_cond_expr (expr);
4471 if (expr != *expr_p)
4473 *expr_p = expr;
4475 /* We can't rely on gimplify_expr to re-gimplify the expanded
4476 form properly, as cleanups might cause the target labels to be
4477 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4478 set up a conditional context. */
4479 gimple_push_condition ();
4480 gimplify_stmt (expr_p, &seq);
4481 gimple_pop_condition (pre_p);
4482 gimple_seq_add_seq (pre_p, seq);
4484 return GS_ALL_DONE;
4488 /* Now do the normal gimplification. */
4490 /* Gimplify condition. */
4491 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4492 is_gimple_condexpr_for_cond, fb_rvalue);
4493 if (ret == GS_ERROR)
4494 return GS_ERROR;
4495 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4497 gimple_push_condition ();
4499 have_then_clause_p = have_else_clause_p = false;
4500 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4501 if (label_true
4502 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4503 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4504 have different locations, otherwise we end up with incorrect
4505 location information on the branches. */
4506 && (optimize
4507 || !EXPR_HAS_LOCATION (expr)
4508 || !rexpr_has_location (label_true)
4509 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4511 have_then_clause_p = true;
4512 label_true = GOTO_DESTINATION (label_true);
4514 else
4515 label_true = create_artificial_label (UNKNOWN_LOCATION);
4516 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4517 if (label_false
4518 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4519 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4520 have different locations, otherwise we end up with incorrect
4521 location information on the branches. */
4522 && (optimize
4523 || !EXPR_HAS_LOCATION (expr)
4524 || !rexpr_has_location (label_false)
4525 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4527 have_else_clause_p = true;
4528 label_false = GOTO_DESTINATION (label_false);
4530 else
4531 label_false = create_artificial_label (UNKNOWN_LOCATION);
4533 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4534 &arm2);
4535 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4536 label_false);
4537 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4538 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4539 gimplify_seq_add_stmt (&seq, cond_stmt);
4540 gimple_stmt_iterator gsi = gsi_last (seq);
4541 maybe_fold_stmt (&gsi);
4543 label_cont = NULL_TREE;
4544 if (!have_then_clause_p)
4546 /* For if (...) {} else { code; } put label_true after
4547 the else block. */
4548 if (TREE_OPERAND (expr, 1) == NULL_TREE
4549 && !have_else_clause_p
4550 && TREE_OPERAND (expr, 2) != NULL_TREE)
4552 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4553 handling that label_cont == label_true can only be reached
4554 through fallthrough from { code; }. */
4555 if (integer_zerop (COND_EXPR_COND (expr)))
4556 UNUSED_LABEL_P (label_true) = 1;
4557 label_cont = label_true;
4559 else
4561 bool then_side_effects
4562 = (TREE_OPERAND (expr, 1)
4563 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4564 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4565 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4566 /* For if (...) { code; } else {} or
4567 if (...) { code; } else goto label; or
4568 if (...) { code; return; } else { ... }
4569 label_cont isn't needed. */
4570 if (!have_else_clause_p
4571 && TREE_OPERAND (expr, 2) != NULL_TREE
4572 && gimple_seq_may_fallthru (seq))
4574 gimple *g;
4575 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4577 /* For if (0) { non-side-effect-code } else { code }
4578 tell -Wimplicit-fallthrough handling that label_cont can
4579 only be reached through fallthrough from { code }. */
4580 if (integer_zerop (COND_EXPR_COND (expr)))
4582 UNUSED_LABEL_P (label_true) = 1;
4583 if (!then_side_effects)
4584 UNUSED_LABEL_P (label_cont) = 1;
4587 g = gimple_build_goto (label_cont);
4589 /* GIMPLE_COND's are very low level; they have embedded
4590 gotos. This particular embedded goto should not be marked
4591 with the location of the original COND_EXPR, as it would
4592 correspond to the COND_EXPR's condition, not the ELSE or the
4593 THEN arms. To avoid marking it with the wrong location, flag
4594 it as "no location". */
4595 gimple_set_do_not_emit_location (g);
4597 gimplify_seq_add_stmt (&seq, g);
4601 if (!have_else_clause_p)
4603 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4604 tell -Wimplicit-fallthrough handling that label_false can only be
4605 reached through fallthrough from { code }. */
4606 if (integer_nonzerop (COND_EXPR_COND (expr))
4607 && (TREE_OPERAND (expr, 2) == NULL_TREE
4608 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4609 UNUSED_LABEL_P (label_false) = 1;
4610 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4611 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4613 if (label_cont)
4614 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4616 gimple_pop_condition (pre_p);
4617 gimple_seq_add_seq (pre_p, seq);
4619 if (ret == GS_ERROR)
4620 ; /* Do nothing. */
4621 else if (have_then_clause_p || have_else_clause_p)
4622 ret = GS_ALL_DONE;
4623 else
4625 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4626 expr = TREE_OPERAND (expr, 0);
4627 gimplify_stmt (&expr, pre_p);
4630 *expr_p = NULL;
4631 return ret;
4634 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4635 to be marked addressable.
4637 We cannot rely on such an expression being directly markable if a temporary
4638 has been created by the gimplification. In this case, we create another
4639 temporary and initialize it with a copy, which will become a store after we
4640 mark it addressable. This can happen if the front-end passed us something
4641 that it could not mark addressable yet, like a Fortran pass-by-reference
4642 parameter (int) floatvar. */
4644 static void
4645 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4647 while (handled_component_p (*expr_p))
4648 expr_p = &TREE_OPERAND (*expr_p, 0);
4650 /* Do not allow an SSA name as the temporary. */
4651 if (is_gimple_reg (*expr_p))
4652 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4655 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4656 a call to __builtin_memcpy. */
4658 static enum gimplify_status
4659 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4660 gimple_seq *seq_p)
4662 tree t, to, to_ptr, from, from_ptr;
4663 gcall *gs;
4664 location_t loc = EXPR_LOCATION (*expr_p);
4666 to = TREE_OPERAND (*expr_p, 0);
4667 from = TREE_OPERAND (*expr_p, 1);
4669 /* Mark the RHS addressable. Beware that it may not be possible to do so
4670 directly if a temporary has been created by the gimplification. */
4671 prepare_gimple_addressable (&from, seq_p);
4673 mark_addressable (from);
4674 from_ptr = build_fold_addr_expr_loc (loc, from);
4675 gimplify_arg (&from_ptr, seq_p, loc);
4677 mark_addressable (to);
4678 to_ptr = build_fold_addr_expr_loc (loc, to);
4679 gimplify_arg (&to_ptr, seq_p, loc);
4681 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4683 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4684 gimple_call_set_alloca_for_var (gs, true);
4686 if (want_value)
4688 /* tmp = memcpy() */
4689 t = create_tmp_var (TREE_TYPE (to_ptr));
4690 gimple_call_set_lhs (gs, t);
4691 gimplify_seq_add_stmt (seq_p, gs);
4693 *expr_p = build_simple_mem_ref (t);
4694 return GS_ALL_DONE;
4697 gimplify_seq_add_stmt (seq_p, gs);
4698 *expr_p = NULL;
4699 return GS_ALL_DONE;
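/* For example, an assignment between two objects whose size is only
   known at run time, with the rhs wrapped in a WITH_SIZE_EXPR
   providing the byte count SIZE, cannot be expressed as a plain
   GIMPLE assignment and is emitted instead as, roughly,

     __builtin_memcpy (&to, &from, SIZE);

   after both operands have been marked addressable above.  */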
4702 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4703 a call to __builtin_memset. In this case we know that the RHS is
4704 a CONSTRUCTOR with an empty element list. */
4706 static enum gimplify_status
4707 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4708 gimple_seq *seq_p)
4710 tree t, from, to, to_ptr;
4711 gcall *gs;
4712 location_t loc = EXPR_LOCATION (*expr_p);
4714 /* Assert our assumptions, to abort instead of producing wrong code
4715 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4716 not be immediately exposed. */
4717 from = TREE_OPERAND (*expr_p, 1);
4718 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4719 from = TREE_OPERAND (from, 0);
4721 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4722 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4724 /* Now proceed. */
4725 to = TREE_OPERAND (*expr_p, 0);
4727 to_ptr = build_fold_addr_expr_loc (loc, to);
4728 gimplify_arg (&to_ptr, seq_p, loc);
4729 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4731 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4733 if (want_value)
4735 /* tmp = memset() */
4736 t = create_tmp_var (TREE_TYPE (to_ptr));
4737 gimple_call_set_lhs (gs, t);
4738 gimplify_seq_add_stmt (seq_p, gs);
4740 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4741 return GS_ALL_DONE;
4744 gimplify_seq_add_stmt (seq_p, gs);
4745 *expr_p = NULL;
4746 return GS_ALL_DONE;
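/* For example, zero-initializing an object of run-time size SIZE from
   an empty CONSTRUCTOR ends up as, roughly,

     __builtin_memset (&to, 0, SIZE);

   which is why the CONSTRUCTOR is asserted above to have no
   elements.  */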
4749 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4750 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4751 assignment. Return non-null if we detect a potential overlap. */
4753 struct gimplify_init_ctor_preeval_data
4755 /* The base decl of the lhs object. May be NULL, in which case we
4756 have to assume the lhs is indirect. */
4757 tree lhs_base_decl;
4759 /* The alias set of the lhs object. */
4760 alias_set_type lhs_alias_set;
4763 static tree
4764 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4766 struct gimplify_init_ctor_preeval_data *data
4767 = (struct gimplify_init_ctor_preeval_data *) xdata;
4768 tree t = *tp;
4770 /* If we find the base object, obviously we have overlap. */
4771 if (data->lhs_base_decl == t)
4772 return t;
4774 /* If the constructor component is indirect, determine if we have a
4775 potential overlap with the lhs. The only bits of information we
4776 have to go on at this point are addressability and alias sets. */
4777 if ((INDIRECT_REF_P (t)
4778 || TREE_CODE (t) == MEM_REF)
4779 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4780 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4781 return t;
4783 /* If the constructor component is a call, determine if it can hide a
4784 potential overlap with the lhs through an INDIRECT_REF like above.
4785 ??? Ugh - this is completely broken. In fact this whole analysis
4786 doesn't look conservative. */
4787 if (TREE_CODE (t) == CALL_EXPR)
4789 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4791 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4792 if (POINTER_TYPE_P (TREE_VALUE (type))
4793 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4794 && alias_sets_conflict_p (data->lhs_alias_set,
4795 get_alias_set
4796 (TREE_TYPE (TREE_VALUE (type)))))
4797 return t;
4800 if (IS_TYPE_OR_DECL_P (t))
4801 *walk_subtrees = 0;
4802 return NULL;
4805 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4806 force values that overlap with the lhs (as described by *DATA)
4807 into temporaries. */
4809 static void
4810 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4811 struct gimplify_init_ctor_preeval_data *data)
4813 enum gimplify_status one;
4815 /* If the value is constant, then there's nothing to pre-evaluate. */
4816 if (TREE_CONSTANT (*expr_p))
4818 /* Ensure it does not have side effects, it might contain a reference to
4819 the object we're initializing. */
4820 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4821 return;
4824 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4825 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4826 return;
4828 /* Recurse for nested constructors. */
4829 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4831 unsigned HOST_WIDE_INT ix;
4832 constructor_elt *ce;
4833 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4835 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4836 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4838 return;
4841 /* If this is a variable sized type, we must remember the size. */
4842 maybe_with_size_expr (expr_p);
4844 /* Gimplify the constructor element to something appropriate for the rhs
4845 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4846 the gimplifier will consider this a store to memory. Doing this
4847 gimplification now means that we won't have to deal with complicated
4848 language-specific trees, nor trees like SAVE_EXPR that can induce
4849 exponential search behavior. */
4850 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4851 if (one == GS_ERROR)
4853 *expr_p = NULL;
4854 return;
4857 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4858 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4859 always be true for all scalars, since is_gimple_mem_rhs insists on a
4860 temporary variable for them. */
4861 if (DECL_P (*expr_p))
4862 return;
4864 /* If this is of variable size, we have no choice but to assume it doesn't
4865 overlap since we can't make a temporary for it. */
4866 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4867 return;
4869 /* Otherwise, we must search for overlap ... */
4870 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4871 return;
4873 /* ... and if found, force the value into a temporary. */
4874 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
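/* For instance, given

     struct S a;
     a = (struct S) { .x = 1, .y = a.x };

   the reference to a.x on the rhs overlaps the object being written,
   so the walk above reports it and the value is forced into a
   temporary before the element-by-element stores begin.  */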
4877 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4878 a RANGE_EXPR in a CONSTRUCTOR for an array.
4880 var = lower;
4881 loop_entry:
4882 object[var] = value;
4883 if (var == upper)
4884 goto loop_exit;
4885 var = var + 1;
4886 goto loop_entry;
4887 loop_exit:
4889 We increment var _after_ the loop exit check because we might otherwise
4890 fail if upper == TYPE_MAX_VALUE (type for upper).
4892 Note that we never have to deal with SAVE_EXPRs here, because this has
4893 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4895 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4896 gimple_seq *, bool);
4898 static void
4899 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4900 tree value, tree array_elt_type,
4901 gimple_seq *pre_p, bool cleared)
4903 tree loop_entry_label, loop_exit_label, fall_thru_label;
4904 tree var, var_type, cref, tmp;
4906 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4907 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4908 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4910 /* Create and initialize the index variable. */
4911 var_type = TREE_TYPE (upper);
4912 var = create_tmp_var (var_type);
4913 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4915 /* Add the loop entry label. */
4916 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4918 /* Build the reference. */
4919 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4920 var, NULL_TREE, NULL_TREE);
4922 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4923 the store. Otherwise just assign value to the reference. */
4925 if (TREE_CODE (value) == CONSTRUCTOR)
4926 /* NB we might have to call ourself recursively through
4927 gimplify_init_ctor_eval if the value is a constructor. */
4928 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4929 pre_p, cleared);
4930 else
4932 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4933 != GS_ERROR)
4934 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4937 /* We exit the loop when the index var is equal to the upper bound. */
4938 gimplify_seq_add_stmt (pre_p,
4939 gimple_build_cond (EQ_EXPR, var, upper,
4940 loop_exit_label, fall_thru_label));
4942 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4944 /* Otherwise, increment the index var... */
4945 tmp = build2 (PLUS_EXPR, var_type, var,
4946 fold_convert (var_type, integer_one_node));
4947 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4949 /* ...and jump back to the loop entry. */
4950 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4952 /* Add the loop exit label. */
4953 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
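/* A RANGE_EXPR here typically comes from a GNU C designated range
   initializer, e.g.

     int a[100] = { [10 ... 89] = v };

   for which the loop built above stores v into a[10] through a[89],
   one element per iteration.  */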
4956 /* A subroutine of gimplify_init_constructor. Generate individual
4957 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4958 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4959 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4960 zeroed first. */
4962 static void
4963 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4964 gimple_seq *pre_p, bool cleared)
4966 tree array_elt_type = NULL;
4967 unsigned HOST_WIDE_INT ix;
4968 tree purpose, value;
4970 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4971 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4973 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4975 tree cref;
4977 /* NULL values are created above for gimplification errors. */
4978 if (value == NULL)
4979 continue;
4981 if (cleared && initializer_zerop (value))
4982 continue;
4984 /* ??? Here's to hoping the front end fills in all of the indices,
4985 so we don't have to figure out what's missing ourselves. */
4986 gcc_assert (purpose);
4988 /* Skip zero-sized fields, unless value has side-effects. This can
4989 happen with calls to functions returning an empty type, which
4990 we shouldn't discard. As a number of downstream passes don't
4991 expect sets of empty type fields, we rely on the gimplification of
4992 the MODIFY_EXPR we make below to drop the assignment statement. */
4993 if (!TREE_SIDE_EFFECTS (value)
4994 && TREE_CODE (purpose) == FIELD_DECL
4995 && is_empty_type (TREE_TYPE (purpose)))
4996 continue;
4998 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4999 whole range. */
5000 if (TREE_CODE (purpose) == RANGE_EXPR)
5002 tree lower = TREE_OPERAND (purpose, 0);
5003 tree upper = TREE_OPERAND (purpose, 1);
5005 /* If the lower bound is equal to upper, just treat it as if
5006 upper was the index. */
5007 if (simple_cst_equal (lower, upper))
5008 purpose = upper;
5009 else
5011 gimplify_init_ctor_eval_range (object, lower, upper, value,
5012 array_elt_type, pre_p, cleared);
5013 continue;
5017 if (array_elt_type)
5019 /* Do not use bitsizetype for ARRAY_REF indices. */
5020 if (TYPE_DOMAIN (TREE_TYPE (object)))
5021 purpose
5022 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5023 purpose);
5024 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5025 purpose, NULL_TREE, NULL_TREE);
5027 else
5029 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5030 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5031 unshare_expr (object), purpose, NULL_TREE);
5034 if (TREE_CODE (value) == CONSTRUCTOR
5035 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5036 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5037 pre_p, cleared);
5038 else
5040 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5041 gimplify_and_add (init, pre_p);
5042 ggc_free (init);
5047 /* Return the appropriate RHS predicate for this LHS. */
5049 gimple_predicate
5050 rhs_predicate_for (tree lhs)
5052 if (is_gimple_reg (lhs))
5053 return is_gimple_reg_rhs_or_call;
5054 else
5055 return is_gimple_mem_rhs_or_call;
5058 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5059 before the LHS has been gimplified. */
5061 static gimple_predicate
5062 initial_rhs_predicate_for (tree lhs)
5064 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5065 return is_gimple_reg_rhs_or_call;
5066 else
5067 return is_gimple_mem_rhs_or_call;
5070 /* Gimplify a C99 compound literal expression. This just means adding
5071 the DECL_EXPR before the current statement and using its anonymous
5072 decl instead. */
5074 static enum gimplify_status
5075 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5076 bool (*gimple_test_f) (tree),
5077 fallback_t fallback)
5079 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5080 tree decl = DECL_EXPR_DECL (decl_s);
5081 tree init = DECL_INITIAL (decl);
5082 /* Mark the decl as addressable if the compound literal
5083 expression is addressable now, otherwise it is marked too late
5084 after we gimplify the initialization expression. */
5085 if (TREE_ADDRESSABLE (*expr_p))
5086 TREE_ADDRESSABLE (decl) = 1;
5087 /* Otherwise, if we don't need an lvalue and have a literal, directly
5088 substitute it. Check if it matches the gimple predicate, as
5089 otherwise we'd generate a new temporary, and we can as well just
5090 use the decl we already have. */
5091 else if (!TREE_ADDRESSABLE (decl)
5092 && !TREE_THIS_VOLATILE (decl)
5093 && init
5094 && (fallback & fb_lvalue) == 0
5095 && gimple_test_f (init))
5097 *expr_p = init;
5098 return GS_OK;
5101 /* If the decl is not addressable, then it is being used in some
5102 expression or on the right hand side of a statement, and it can
5103 be put into a readonly data section. */
5104 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5105 TREE_READONLY (decl) = 1;
5107 /* This decl isn't mentioned in the enclosing block, so add it to the
5108 list of temps. FIXME it seems a bit of a kludge to say that
5109 anonymous artificial vars aren't pushed, but everything else is. */
5110 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5111 gimple_add_tmp_var (decl);
5113 gimplify_and_add (decl_s, pre_p);
5114 *expr_p = decl;
5115 return GS_OK;
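/* For example, for

     int *p = (int []) { 1, 2, 3 };

   the anonymous array decl behind the compound literal is declared
   via its DECL_EXPR in PRE_P and then simply replaces the literal
   here, as if the source had used a named temporary array.  */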
5118 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5119 return a new CONSTRUCTOR if something changed. */
5121 static tree
5122 optimize_compound_literals_in_ctor (tree orig_ctor)
5124 tree ctor = orig_ctor;
5125 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5126 unsigned int idx, num = vec_safe_length (elts);
5128 for (idx = 0; idx < num; idx++)
5130 tree value = (*elts)[idx].value;
5131 tree newval = value;
5132 if (TREE_CODE (value) == CONSTRUCTOR)
5133 newval = optimize_compound_literals_in_ctor (value);
5134 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5136 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5137 tree decl = DECL_EXPR_DECL (decl_s);
5138 tree init = DECL_INITIAL (decl);
5140 if (!TREE_ADDRESSABLE (value)
5141 && !TREE_ADDRESSABLE (decl)
5142 && init
5143 && TREE_CODE (init) == CONSTRUCTOR)
5144 newval = optimize_compound_literals_in_ctor (init);
5146 if (newval == value)
5147 continue;
5149 if (ctor == orig_ctor)
5151 ctor = copy_node (orig_ctor);
5152 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5153 elts = CONSTRUCTOR_ELTS (ctor);
5155 (*elts)[idx].value = newval;
5157 return ctor;
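/* So an initializer such as

     struct T t = { .s = (struct S) { 1, 2 } };

   has the embedded compound literal folded away, leaving a nested
   CONSTRUCTOR as if it had been written { .s = { 1, 2 } }, provided
   neither the literal nor its decl is addressable.  */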
5160 /* A subroutine of gimplify_modify_expr. Break out elements of a
5161 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5163 Note that we still need to clear any elements that don't have explicit
5164 initializers, so if not all elements are initialized we keep the
5165 original MODIFY_EXPR, we just remove all of the constructor elements.
5167 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5168 GS_ERROR if we would have to create a temporary when gimplifying
5169 this constructor. Otherwise, return GS_OK.
5171 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5173 static enum gimplify_status
5174 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5175 bool want_value, bool notify_temp_creation)
5177 tree object, ctor, type;
5178 enum gimplify_status ret;
5179 vec<constructor_elt, va_gc> *elts;
5180 bool cleared = false;
5181 bool is_empty_ctor = false;
5182 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5184 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5186 if (!notify_temp_creation)
5188 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5189 is_gimple_lvalue, fb_lvalue);
5190 if (ret == GS_ERROR)
5191 return ret;
5194 object = TREE_OPERAND (*expr_p, 0);
5195 ctor = TREE_OPERAND (*expr_p, 1)
5196 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5197 type = TREE_TYPE (ctor);
5198 elts = CONSTRUCTOR_ELTS (ctor);
5199 ret = GS_ALL_DONE;
5201 switch (TREE_CODE (type))
5203 case RECORD_TYPE:
5204 case UNION_TYPE:
5205 case QUAL_UNION_TYPE:
5206 case ARRAY_TYPE:
5208 /* Use readonly data for initializers of this or smaller size
5209 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5210 ratio. */
5211 const HOST_WIDE_INT min_unique_size = 64;
5212 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5213 is smaller than this, use readonly data. */
5214 const int unique_nonzero_ratio = 8;
5215 /* True if a single access of the object must be ensured. This is the
5216 case if the target is volatile, the type is non-addressable and more
5217 than one field needs to be assigned. */
5218 const bool ensure_single_access
5219 = TREE_THIS_VOLATILE (object)
5220 && !TREE_ADDRESSABLE (type)
5221 && vec_safe_length (elts) > 1;
5222 struct gimplify_init_ctor_preeval_data preeval_data;
5223 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5224 HOST_WIDE_INT num_unique_nonzero_elements;
5225 bool complete_p, valid_const_initializer;
5227 /* Aggregate types must lower constructors to initialization of
5228 individual elements. The exception is that a CONSTRUCTOR node
5229 with no elements indicates zero-initialization of the whole. */
5230 if (vec_safe_is_empty (elts))
5232 if (notify_temp_creation)
5233 return GS_OK;
5235 /* The var will be initialized and so appear on lhs of
5236 assignment, it can't be TREE_READONLY anymore. */
5237 if (VAR_P (object))
5238 TREE_READONLY (object) = 0;
5240 is_empty_ctor = true;
5241 break;
5244 /* Fetch information about the constructor to direct later processing.
5245 We might want to make static versions of it in various cases, and
5246 can only do so if it known to be a valid constant initializer. */
5247 valid_const_initializer
5248 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5249 &num_unique_nonzero_elements,
5250 &num_ctor_elements, &complete_p);
5252 /* If a const aggregate variable is being initialized, then it
5253 should never be a loss to promote the variable to be static. */
5254 if (valid_const_initializer
5255 && num_nonzero_elements > 1
5256 && TREE_READONLY (object)
5257 && VAR_P (object)
5258 && !DECL_REGISTER (object)
5259 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5260 || DECL_MERGEABLE (object))
5261 /* For ctors that have many repeated nonzero elements
5262 represented through RANGE_EXPRs, prefer initializing
5263 those through runtime loops over copies of large amounts
5264 of data from readonly data section. */
5265 && (num_unique_nonzero_elements
5266 > num_nonzero_elements / unique_nonzero_ratio
5267 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5268 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5270 if (notify_temp_creation)
5271 return GS_ERROR;
5273 DECL_INITIAL (object) = ctor;
5274 TREE_STATIC (object) = 1;
5275 if (!DECL_NAME (object))
5276 DECL_NAME (object) = create_tmp_var_name ("C");
5277 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5279 /* ??? C++ doesn't automatically append a .<number> to the
5280 assembler name, and even when it does, it looks at FE private
5281 data structures to figure out what that number should be,
5282 which are not set for this variable. I suppose this is
5283 important for local statics for inline functions, which aren't
5284 "local" in the object file sense. So in order to get a unique
5285 TU-local symbol, we must invoke the lhd version now. */
5286 lhd_set_decl_assembler_name (object);
5288 *expr_p = NULL_TREE;
5289 break;
5292 /* The var will be initialized and so appear on lhs of
5293 assignment, it can't be TREE_READONLY anymore. */
5294 if (VAR_P (object) && !notify_temp_creation)
5295 TREE_READONLY (object) = 0;
5297 /* If there are "lots" of initialized elements, even discounting
5298 those that are not address constants (and thus *must* be
5299 computed at runtime), then partition the constructor into
5300 constant and non-constant parts. Block copy the constant
5301 parts in, then generate code for the non-constant parts. */
5302 /* TODO. There's code in cp/typeck.cc to do this. */
5304 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5305 /* store_constructor will ignore the clearing of variable-sized
5306 objects. Initializers for such objects must explicitly set
5307 every field that needs to be set. */
5308 cleared = false;
5309 else if (!complete_p)
5310 /* If the constructor isn't complete, clear the whole object
5311 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5313 ??? This ought not to be needed. For any element not present
5314 in the initializer, we should simply set them to zero. Except
5315 we'd need to *find* the elements that are not present, and that
5316 requires trickery to avoid quadratic compile-time behavior in
5317 large cases or excessive memory use in small cases. */
5318 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5319 else if (num_ctor_elements - num_nonzero_elements
5320 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5321 && num_nonzero_elements < num_ctor_elements / 4)
5322 /* If there are "lots" of zeros, it's more efficient to clear
5323 the memory and then set the nonzero elements. */
5324 cleared = true;
5325 else if (ensure_single_access && num_nonzero_elements == 0)
5326 /* If a single access to the target must be ensured and all elements
5327 are zero, then it's optimal to clear whatever their number. */
5328 cleared = true;
5329 else
5330 cleared = false;
5332 /* If there are "lots" of initialized elements, and all of them
5333 are valid address constants, then the entire initializer can
5334 be dropped to memory, and then memcpy'd out. Don't do this
5335 for sparse arrays, though, as it's more efficient to follow
5336 the standard CONSTRUCTOR behavior of memset followed by
5337 individual element initialization. Also don't do this for small
5338 all-zero initializers (which aren't big enough to merit
5339 clearing), and don't try to make bitwise copies of
5340 TREE_ADDRESSABLE types. */
5341 if (valid_const_initializer
5342 && complete_p
5343 && !(cleared || num_nonzero_elements == 0)
5344 && !TREE_ADDRESSABLE (type))
5346 HOST_WIDE_INT size = int_size_in_bytes (type);
5347 unsigned int align;
5349 /* ??? We can still get unbounded array types, at least
5350 from the C++ front end. This seems wrong, but attempt
5351 to work around it for now. */
5352 if (size < 0)
5354 size = int_size_in_bytes (TREE_TYPE (object));
5355 if (size >= 0)
5356 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5359 /* Find the maximum alignment we can assume for the object. */
5360 /* ??? Make use of DECL_OFFSET_ALIGN. */
5361 if (DECL_P (object))
5362 align = DECL_ALIGN (object);
5363 else
5364 align = TYPE_ALIGN (type);
5366 /* Do a block move either if the size is so small as to make
5367 each individual move a sub-unit move on average, or if it
5368 is so large as to make individual moves inefficient. */
5369 if (size > 0
5370 && num_nonzero_elements > 1
5371 /* For ctors that have many repeated nonzero elements
5372 represented through RANGE_EXPRs, prefer initializing
5373 those through runtime loops over copies of large amounts
5374 of data from readonly data section. */
5375 && (num_unique_nonzero_elements
5376 > num_nonzero_elements / unique_nonzero_ratio
5377 || size <= min_unique_size)
5378 && (size < num_nonzero_elements
5379 || !can_move_by_pieces (size, align)))
5381 if (notify_temp_creation)
5382 return GS_ERROR;
5384 walk_tree (&ctor, force_labels_r, NULL, NULL);
5385 ctor = tree_output_constant_def (ctor);
5386 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5387 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5388 TREE_OPERAND (*expr_p, 1) = ctor;
5390 /* This is no longer an assignment of a CONSTRUCTOR, but
5391 we still may have processing to do on the LHS. So
5392 pretend we didn't do anything here to let that happen. */
5393 return GS_UNHANDLED;
5397 /* If a single access to the target must be ensured and there are
5398 nonzero elements or the zero elements are not assigned en masse,
5399 initialize the target from a temporary. */
5400 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5402 if (notify_temp_creation)
5403 return GS_ERROR;
5405 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5406 TREE_OPERAND (*expr_p, 0) = temp;
5407 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5408 *expr_p,
5409 build2 (MODIFY_EXPR, void_type_node,
5410 object, temp));
5411 return GS_OK;
5414 if (notify_temp_creation)
5415 return GS_OK;
5417 /* If there are nonzero elements and if needed, pre-evaluate to capture
5418 elements overlapping with the lhs into temporaries. We must do this
5419 before clearing to fetch the values before they are zeroed-out. */
5420 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5422 preeval_data.lhs_base_decl = get_base_address (object);
5423 if (!DECL_P (preeval_data.lhs_base_decl))
5424 preeval_data.lhs_base_decl = NULL;
5425 preeval_data.lhs_alias_set = get_alias_set (object);
5427 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5428 pre_p, post_p, &preeval_data);
5431 bool ctor_has_side_effects_p
5432 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5434 if (cleared)
5436 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5437 Note that we still have to gimplify, in order to handle the
5438 case of variable sized types. Avoid shared tree structures. */
5439 CONSTRUCTOR_ELTS (ctor) = NULL;
5440 TREE_SIDE_EFFECTS (ctor) = 0;
5441 object = unshare_expr (object);
5442 gimplify_stmt (expr_p, pre_p);
5445 /* If we have not block cleared the object, or if there are nonzero
5446 elements in the constructor, or if the constructor has side effects,
5447 add assignments to the individual scalar fields of the object. */
5448 if (!cleared
5449 || num_nonzero_elements > 0
5450 || ctor_has_side_effects_p)
5451 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5453 *expr_p = NULL_TREE;
5455 break;
5457 case COMPLEX_TYPE:
5459 tree r, i;
5461 if (notify_temp_creation)
5462 return GS_OK;
5464 /* Extract the real and imaginary parts out of the ctor. */
5465 gcc_assert (elts->length () == 2);
5466 r = (*elts)[0].value;
5467 i = (*elts)[1].value;
5468 if (r == NULL || i == NULL)
5470 tree zero = build_zero_cst (TREE_TYPE (type));
5471 if (r == NULL)
5472 r = zero;
5473 if (i == NULL)
5474 i = zero;
5477 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5478 represent creation of a complex value. */
5479 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5481 ctor = build_complex (type, r, i);
5482 TREE_OPERAND (*expr_p, 1) = ctor;
5484 else
5486 ctor = build2 (COMPLEX_EXPR, type, r, i);
5487 TREE_OPERAND (*expr_p, 1) = ctor;
5488 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5489 pre_p,
5490 post_p,
5491 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5492 fb_rvalue);
5495 break;
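/* Sketch of the two COMPLEX_TYPE outcomes above (example only): with
   both parts constant the initializer folds to a COMPLEX_CST,

     _Complex double z = 1.0 + 2.0i;    -> COMPLEX_CST <1.0, 2.0>

   while a non-constant part yields a COMPLEX_EXPR that is then
   gimplified as an rvalue:

     _Complex double z2 = x + 2.0i;     -> COMPLEX_EXPR <x, 2.0>  */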
5497 case VECTOR_TYPE:
5499 unsigned HOST_WIDE_INT ix;
5500 constructor_elt *ce;
5502 if (notify_temp_creation)
5503 return GS_OK;
5505 /* Vector types use CONSTRUCTOR all the way through gimple
5506 compilation as a general initializer. */
5507 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5509 enum gimplify_status tret;
5510 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5511 fb_rvalue);
5512 if (tret == GS_ERROR)
5513 ret = GS_ERROR;
5514 else if (TREE_STATIC (ctor)
5515 && !initializer_constant_valid_p (ce->value,
5516 TREE_TYPE (ce->value)))
5517 TREE_STATIC (ctor) = 0;
5519 recompute_constructor_flags (ctor);
5521 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5522 if (TREE_CONSTANT (ctor))
5524 bool constant_p = true;
5525 tree value;
5527 /* Even when ctor is constant, it might contain non-*_CST
5528 elements, such as addresses or trapping values like
5529 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5530 in VECTOR_CST nodes. */
5531 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5532 if (!CONSTANT_CLASS_P (value))
5534 constant_p = false;
5535 break;
5538 if (constant_p)
5540 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5541 break;
5545 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5546 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5548 break;
5550 default:
5551 /* So how did we get a CONSTRUCTOR for a scalar type? */
5552 gcc_unreachable ();
5555 if (ret == GS_ERROR)
5556 return GS_ERROR;
5557 /* If we have gimplified both sides of the initializer but have
5558 not emitted an assignment, do so now. */
5559 if (*expr_p
5560 /* If the type is an empty type, we don't need to emit the
5561 assignment. */
5562 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5564 tree lhs = TREE_OPERAND (*expr_p, 0);
5565 tree rhs = TREE_OPERAND (*expr_p, 1);
5566 if (want_value && object == lhs)
5567 lhs = unshare_expr (lhs);
5568 gassign *init = gimple_build_assign (lhs, rhs);
5569 gimplify_seq_add_stmt (pre_p, init);
5571 if (want_value)
5573 *expr_p = object;
5574 ret = GS_OK;
5576 else
5578 *expr_p = NULL;
5579 ret = GS_ALL_DONE;
5582 /* If the user requests to initialize automatic variables, we
5583 should initialize paddings inside the variable. Add a call to
5584 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5585 initialize paddings of object always to zero regardless of
5586 INIT_TYPE. Note, we will not insert this call if the aggregate
5587 variable has been completely cleared already or it's initialized
5588 with an empty constructor. We cannot insert this call if the
5589 variable is a gimple register since __builtin_clear_padding will take
5590 the address of the variable. As a result, if a long double/_Complex long
5591 double variable will be spilled into the stack later, its padding cannot
5592 be cleared with __builtin_clear_padding. We should clear its padding
5593 when it is spilled into memory. */
5594 if (is_init_expr
5595 && !is_gimple_reg (object)
5596 && clear_padding_type_may_have_padding_p (type)
5597 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5598 || !AGGREGATE_TYPE_P (type))
5599 && is_var_need_auto_init (object))
5600 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5602 return ret;
5605 /* Given a pointer value OP0, return a simplified version of an
5606 indirection through OP0, or NULL_TREE if no simplification is
5607 possible. This may only be applied to a rhs of an expression.
5608 Note that the resulting type may be different from the type pointed
5609 to in the sense that it is still compatible from the langhooks
5610 point of view. */
5612 static tree
5613 gimple_fold_indirect_ref_rhs (tree t)
5615 return gimple_fold_indirect_ref (t);
5618 /* Subroutine of gimplify_modify_expr to do simplifications of
5619 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5620 something changes. */
5622 static enum gimplify_status
5623 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5624 gimple_seq *pre_p, gimple_seq *post_p,
5625 bool want_value)
5627 enum gimplify_status ret = GS_UNHANDLED;
5628 bool changed;
5632 changed = false;
5633 switch (TREE_CODE (*from_p))
5635 case VAR_DECL:
5636 /* If we're assigning from a read-only variable initialized with
5637 a constructor and not volatile, do the direct assignment from
5638 the constructor, but only if the target is not volatile either
5639 since this latter assignment might end up being done on a per-field
5640 basis. However, if the target is volatile and the type
5641 is aggregate and non-addressable, gimplify_init_constructor
5642 knows that it needs to ensure a single access to the target
5643 and it will return GS_OK only in this case. */
5644 if (TREE_READONLY (*from_p)
5645 && DECL_INITIAL (*from_p)
5646 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5647 && !TREE_THIS_VOLATILE (*from_p)
5648 && (!TREE_THIS_VOLATILE (*to_p)
5649 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5650 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5652 tree old_from = *from_p;
5653 enum gimplify_status subret;
5655 /* Move the constructor into the RHS. */
5656 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5658 /* Let's see if gimplify_init_constructor will need to put
5659 it in memory. */
5660 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5661 false, true);
5662 if (subret == GS_ERROR)
5664 /* If so, revert the change. */
5665 *from_p = old_from;
5667 else
5669 ret = GS_OK;
5670 changed = true;
5673 break;
5674 case INDIRECT_REF:
5675 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5676 /* If we have code like
5678 *(const A*)(A*)&x
5680 where the type of "x" is a (possibly cv-qualified) variant
5681 of "A", treat the entire expression as identical to "x".
5682 This kind of code arises in C++ when an object is bound
5683 to a const reference, and if "x" is a TARGET_EXPR we want
5684 to take advantage of the optimization below. But not if
5685 the type is TREE_ADDRESSABLE; then C++17 says that the
5686 TARGET_EXPR needs to be a temporary. */
5687 if (tree t
5688 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5690 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5691 if (TREE_THIS_VOLATILE (t) != volatile_p)
5693 if (DECL_P (t))
5694 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5695 build_fold_addr_expr (t));
5696 if (REFERENCE_CLASS_P (t))
5697 TREE_THIS_VOLATILE (t) = volatile_p;
5699 *from_p = t;
5700 ret = GS_OK;
5701 changed = true;
5703 break;
5705 case TARGET_EXPR:
5707 /* If we are initializing something from a TARGET_EXPR, strip the
5708 TARGET_EXPR and initialize it directly, if possible. This can't
5709 be done if the initializer is void, since that implies that the
5710 temporary is set in some non-trivial way.
5712 ??? What about code that pulls out the temp and uses it
5713 elsewhere? I think that such code never uses the TARGET_EXPR as
5714 an initializer. If I'm wrong, we'll die because the temp won't
5715 have any RTL. In that case, I guess we'll need to replace
5716 references somehow. */
5717 tree init = TARGET_EXPR_INITIAL (*from_p);
5719 if (init
5720 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5721 || !TARGET_EXPR_NO_ELIDE (*from_p))
5722 && !VOID_TYPE_P (TREE_TYPE (init)))
5724 *from_p = init;
5725 ret = GS_OK;
5726 changed = true;
5729 break;
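/* Hedged illustration: in C++, initializing from a temporary such as

     S s = S (args);    GENERIC: s = TARGET_EXPR <D.1, S::S (&D.1, args)>

   can, when the initial value is not void and elision is allowed, be
   rewritten to initialize "s" directly from the TARGET_EXPR's
   initializer, skipping the extra slot.  The name D.1 is invented
   for the sketch.  */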
5731 case COMPOUND_EXPR:
5732 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5733 caught. */
5734 gimplify_compound_expr (from_p, pre_p, true);
5735 ret = GS_OK;
5736 changed = true;
5737 break;
5739 case CONSTRUCTOR:
5740 /* If we already made some changes, let the front end have a
5741 crack at this before we break it down. */
5742 if (ret != GS_UNHANDLED)
5743 break;
5745 /* If we're initializing from a CONSTRUCTOR, break this into
5746 individual MODIFY_EXPRs. */
5747 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5748 false);
5749 return ret;
5751 case COND_EXPR:
5752 /* If we're assigning to a non-register type, push the assignment
5753 down into the branches. This is mandatory for ADDRESSABLE types,
5754 since we cannot generate temporaries for such, but it saves a
5755 copy in other cases as well. */
5756 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5758 /* This code should mirror the code in gimplify_cond_expr. */
5759 enum tree_code code = TREE_CODE (*expr_p);
5760 tree cond = *from_p;
5761 tree result = *to_p;
5763 ret = gimplify_expr (&result, pre_p, post_p,
5764 is_gimple_lvalue, fb_lvalue);
5765 if (ret != GS_ERROR)
5766 ret = GS_OK;
5768 /* If we are going to write RESULT more than once, clear
5769 TREE_READONLY flag, otherwise we might incorrectly promote
5770 the variable to static const and initialize it at compile
5771 time in one of the branches. */
5772 if (VAR_P (result)
5773 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5774 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5775 TREE_READONLY (result) = 0;
5776 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5777 TREE_OPERAND (cond, 1)
5778 = build2 (code, void_type_node, result,
5779 TREE_OPERAND (cond, 1));
5780 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5781 TREE_OPERAND (cond, 2)
5782 = build2 (code, void_type_node, unshare_expr (result),
5783 TREE_OPERAND (cond, 2));
5785 TREE_TYPE (cond) = void_type_node;
5786 recalculate_side_effects (cond);
5788 if (want_value)
5790 gimplify_and_add (cond, pre_p);
5791 *expr_p = unshare_expr (result);
5793 else
5794 *expr_p = cond;
5795 return ret;
5797 break;
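/* Example (sketch) of pushing the assignment into the branches, for a
   non-register type that cannot live in a temporary:

     big = c ? x : y;

   becomes, mirroring gimplify_cond_expr,

     if (c) big = x; else big = y;

   with the whole COND_EXPR retyped to void.  */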
5799 case CALL_EXPR:
5800 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5801 return slot so that we don't generate a temporary. */
5802 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5803 && aggregate_value_p (*from_p, *from_p))
5805 bool use_target;
5807 if (!(rhs_predicate_for (*to_p))(*from_p))
5808 /* If we need a temporary, *to_p isn't accurate. */
5809 use_target = false;
5810 /* It's OK to use the return slot directly unless it's an NRV. */
5811 else if (TREE_CODE (*to_p) == RESULT_DECL
5812 && DECL_NAME (*to_p) == NULL_TREE
5813 && needs_to_live_in_memory (*to_p))
5814 use_target = true;
5815 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5816 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5817 /* Don't force regs into memory. */
5818 use_target = false;
5819 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5820 /* It's OK to use the target directly if it's being
5821 initialized. */
5822 use_target = true;
5823 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5824 != INTEGER_CST)
5825 /* Always use the target and thus RSO for variable-sized types.
5826 GIMPLE cannot deal with a variable-sized assignment
5827 embedded in a call statement. */
5828 use_target = true;
5829 else if (TREE_CODE (*to_p) != SSA_NAME
5830 && (!is_gimple_variable (*to_p)
5831 || needs_to_live_in_memory (*to_p)))
5832 /* Don't use the original target if it's already addressable;
5833 if its address escapes, and the called function uses the
5834 NRV optimization, a conforming program could see *to_p
5835 change before the called function returns; see c++/19317.
5836 When optimizing, the return_slot pass marks more functions
5837 as safe after we have escape info. */
5838 use_target = false;
5839 else
5840 use_target = true;
5842 if (use_target)
5844 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5845 mark_addressable (*to_p);
5848 break;
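/* A sketch of the return-slot decision above (illustrative only):

     struct big f (void);
     struct big b = f ();    INIT_EXPR: safe to pass &b as return slot

   With CALL_EXPR_RETURN_SLOT_OPT set, expansion can emit the moral
   equivalent of f (&b), avoiding a temporary copy; for a plain
   assignment to an addressable object the flag stays clear so the
   caller cannot observe *to_p changing early (see c++/19317).  */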
5850 case WITH_SIZE_EXPR:
5851 /* Likewise for calls that return an aggregate of non-constant size,
5852 since we would not be able to generate a temporary at all. */
5853 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5855 *from_p = TREE_OPERAND (*from_p, 0);
5856 /* We don't change ret in this case because the
5857 WITH_SIZE_EXPR might have been added in
5858 gimplify_modify_expr, so returning GS_OK would lead to an
5859 infinite loop. */
5860 changed = true;
5862 break;
5864 /* If we're initializing from a container, push the initialization
5865 inside it. */
5866 case CLEANUP_POINT_EXPR:
5867 case BIND_EXPR:
5868 case STATEMENT_LIST:
5870 tree wrap = *from_p;
5871 tree t;
5873 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5874 fb_lvalue);
5875 if (ret != GS_ERROR)
5876 ret = GS_OK;
5878 t = voidify_wrapper_expr (wrap, *expr_p);
5879 gcc_assert (t == *expr_p);
5881 if (want_value)
5883 gimplify_and_add (wrap, pre_p);
5884 *expr_p = unshare_expr (*to_p);
5886 else
5887 *expr_p = wrap;
5888 return GS_OK;
5891 case NOP_EXPR:
5892 /* Pull out compound literal expressions from a NOP_EXPR.
5893 Those are created in the C FE to drop qualifiers during
5894 lvalue conversion. */
5895 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5896 && tree_ssa_useless_type_conversion (*from_p))
5898 *from_p = TREE_OPERAND (*from_p, 0);
5899 ret = GS_OK;
5900 changed = true;
5902 break;
5904 case COMPOUND_LITERAL_EXPR:
5906 tree complit = TREE_OPERAND (*expr_p, 1);
5907 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5908 tree decl = DECL_EXPR_DECL (decl_s);
5909 tree init = DECL_INITIAL (decl);
5911 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5912 into struct T x = { 0, 1, 2 } if the address of the
5913 compound literal has never been taken. */
5914 if (!TREE_ADDRESSABLE (complit)
5915 && !TREE_ADDRESSABLE (decl)
5916 && init)
5918 *expr_p = copy_node (*expr_p);
5919 TREE_OPERAND (*expr_p, 1) = init;
5920 return GS_OK;
5924 default:
5925 break;
5928 while (changed);
5930 return ret;
5934 /* Return true if T looks like a valid GIMPLE statement. */
5936 static bool
5937 is_gimple_stmt (tree t)
5939 const enum tree_code code = TREE_CODE (t);
5941 switch (code)
5943 case NOP_EXPR:
5944 /* The only valid NOP_EXPR is the empty statement. */
5945 return IS_EMPTY_STMT (t);
5947 case BIND_EXPR:
5948 case COND_EXPR:
5949 /* These are only valid if they're void. */
5950 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5952 case SWITCH_EXPR:
5953 case GOTO_EXPR:
5954 case RETURN_EXPR:
5955 case LABEL_EXPR:
5956 case CASE_LABEL_EXPR:
5957 case TRY_CATCH_EXPR:
5958 case TRY_FINALLY_EXPR:
5959 case EH_FILTER_EXPR:
5960 case CATCH_EXPR:
5961 case ASM_EXPR:
5962 case STATEMENT_LIST:
5963 case OACC_PARALLEL:
5964 case OACC_KERNELS:
5965 case OACC_SERIAL:
5966 case OACC_DATA:
5967 case OACC_HOST_DATA:
5968 case OACC_DECLARE:
5969 case OACC_UPDATE:
5970 case OACC_ENTER_DATA:
5971 case OACC_EXIT_DATA:
5972 case OACC_CACHE:
5973 case OMP_PARALLEL:
5974 case OMP_FOR:
5975 case OMP_SIMD:
5976 case OMP_DISTRIBUTE:
5977 case OMP_LOOP:
5978 case OACC_LOOP:
5979 case OMP_SCAN:
5980 case OMP_SCOPE:
5981 case OMP_SECTIONS:
5982 case OMP_SECTION:
5983 case OMP_SINGLE:
5984 case OMP_MASTER:
5985 case OMP_MASKED:
5986 case OMP_TASKGROUP:
5987 case OMP_ORDERED:
5988 case OMP_CRITICAL:
5989 case OMP_TASK:
5990 case OMP_TARGET:
5991 case OMP_TARGET_DATA:
5992 case OMP_TARGET_UPDATE:
5993 case OMP_TARGET_ENTER_DATA:
5994 case OMP_TARGET_EXIT_DATA:
5995 case OMP_TASKLOOP:
5996 case OMP_TEAMS:
5997 /* These are always void. */
5998 return true;
6000 case CALL_EXPR:
6001 case MODIFY_EXPR:
6002 case PREDICT_EXPR:
6003 /* These are valid regardless of their type. */
6004 return true;
6006 default:
6007 return false;
6012 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6013 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6015 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6016 other, unmodified part of the complex object just before the total store.
6017 As a consequence, if the object is still uninitialized, an undefined value
6018 will be loaded into a register, which may result in a spurious exception
6019 if the register is floating-point and the value happens to be a signaling
6020 NaN for example. Then the fully-fledged complex operations lowering pass
6021 followed by a DCE pass are necessary in order to fix things up. */
6023 static enum gimplify_status
6024 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6025 bool want_value)
6027 enum tree_code code, ocode;
6028 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6030 lhs = TREE_OPERAND (*expr_p, 0);
6031 rhs = TREE_OPERAND (*expr_p, 1);
6032 code = TREE_CODE (lhs);
6033 lhs = TREE_OPERAND (lhs, 0);
6035 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6036 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6037 suppress_warning (other);
6038 other = get_formal_tmp_var (other, pre_p);
6040 realpart = code == REALPART_EXPR ? rhs : other;
6041 imagpart = code == REALPART_EXPR ? other : rhs;
6043 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6044 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6045 else
6046 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6048 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6049 *expr_p = (want_value) ? rhs : NULL_TREE;
6051 return GS_ALL_DONE;
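/* Worked example (not from the source): promoting a partial store

     __real__ z = r;

   loads the untouched part first and then stores the whole object:

     tmp = __imag__ z;
     z = COMPLEX_EXPR <r, tmp>;

   which is what allows "z" to stay a gimple register, at the cost of
   the possibly uninitialized load described in the comment above.  */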
6054 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6056 modify_expr
6057 : varname '=' rhs
6058 | '*' ID '=' rhs
6060 PRE_P points to the list where side effects that must happen before
6061 *EXPR_P should be stored.
6063 POST_P points to the list where side effects that must happen after
6064 *EXPR_P should be stored.
6066 WANT_VALUE is nonzero iff we want to use the value of this expression
6067 in another expression. */
6069 static enum gimplify_status
6070 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6071 bool want_value)
6073 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6074 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6075 enum gimplify_status ret = GS_UNHANDLED;
6076 gimple *assign;
6077 location_t loc = EXPR_LOCATION (*expr_p);
6078 gimple_stmt_iterator gsi;
6080 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6081 return GS_ERROR;
6083 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6084 || TREE_CODE (*expr_p) == INIT_EXPR);
6086 /* Trying to simplify a clobber using normal logic doesn't work,
6087 so handle it here. */
6088 if (TREE_CLOBBER_P (*from_p))
6090 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6091 if (ret == GS_ERROR)
6092 return ret;
6093 gcc_assert (!want_value);
6094 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6096 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6097 pre_p, post_p);
6098 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6100 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6101 *expr_p = NULL;
6102 return GS_ALL_DONE;
6105 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6106 memset. */
6107 if (TREE_TYPE (*from_p) != error_mark_node
6108 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6109 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6110 && TREE_CODE (*from_p) == CONSTRUCTOR
6111 && CONSTRUCTOR_NELTS (*from_p) == 0)
6113 maybe_with_size_expr (from_p);
6114 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6115 return gimplify_modify_expr_to_memset (expr_p,
6116 TREE_OPERAND (*from_p, 1),
6117 want_value, pre_p);
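/* Hedged sketch of the empty variable-size CONSTRUCTOR case: for an
   object "x" whose size SZ is only known at runtime, an
   initialization with no elements, of the approximate source shape

     x = (typeof (x)) {};

   has nothing to assign element-wise, so it is lowered to roughly

     __builtin_memset (&x, 0, SZ);

   with SZ taken from the WITH_SIZE_EXPR built just above.  */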
6120 /* Insert pointer conversions required by the middle-end that are not
6121 required by the frontend. This fixes middle-end type checking for
6122 test cases such as gcc.dg/redecl-6.c. */
6123 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6125 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6126 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6127 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6130 /* See if any simplifications can be done based on what the RHS is. */
6131 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6132 want_value);
6133 if (ret != GS_UNHANDLED)
6134 return ret;
6136 /* For empty types only gimplify the left hand side and right hand
6137 side as statements and throw away the assignment. Do this after
6138 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6139 types properly. */
6140 if (is_empty_type (TREE_TYPE (*from_p))
6141 && !want_value
6142 /* Don't do this for calls that return addressable types, expand_call
6143 relies on those having a lhs. */
6144 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6145 && TREE_CODE (*from_p) == CALL_EXPR))
6147 gimplify_stmt (from_p, pre_p);
6148 gimplify_stmt (to_p, pre_p);
6149 *expr_p = NULL_TREE;
6150 return GS_ALL_DONE;
6153 /* If the value being copied is of variable width, compute the length
6154 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6155 before gimplifying any of the operands so that we can resolve any
6156 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6157 the size of the expression to be copied, not of the destination, so
6158 that is what we must do here. */
6159 maybe_with_size_expr (from_p);
6161 /* As a special case, we have to temporarily allow for assignments
6162 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6163 a toplevel statement, when gimplifying the GENERIC expression
6164 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6165 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6167 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6168 prevent gimplify_expr from trying to create a new temporary for
6169 foo's LHS, we tell it that it should only gimplify until it
6170 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6171 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6172 and all we need to do here is set 'a' to be its LHS. */
6174 /* Gimplify the RHS first for C++17 and bug 71104. */
6175 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6176 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6177 if (ret == GS_ERROR)
6178 return ret;
6180 /* Then gimplify the LHS. */
6181 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6182 twice we have to make sure to gimplify into non-SSA as otherwise
6183 the abnormal edge added later will make those defs not dominate
6184 their uses.
6185 ??? Technically this applies only to the registers used in the
6186 resulting non-register *TO_P. */
6187 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6188 if (saved_into_ssa
6189 && TREE_CODE (*from_p) == CALL_EXPR
6190 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6191 gimplify_ctxp->into_ssa = false;
6192 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6193 gimplify_ctxp->into_ssa = saved_into_ssa;
6194 if (ret == GS_ERROR)
6195 return ret;
6197 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6198 guess for the predicate was wrong. */
6199 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6200 if (final_pred != initial_pred)
6202 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6203 if (ret == GS_ERROR)
6204 return ret;
6207 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6208 size as argument to the call. */
6209 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6211 tree call = TREE_OPERAND (*from_p, 0);
6212 tree vlasize = TREE_OPERAND (*from_p, 1);
6214 if (TREE_CODE (call) == CALL_EXPR
6215 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6217 int nargs = call_expr_nargs (call);
6218 tree type = TREE_TYPE (call);
6219 tree ap = CALL_EXPR_ARG (call, 0);
6220 tree tag = CALL_EXPR_ARG (call, 1);
6221 tree aptag = CALL_EXPR_ARG (call, 2);
6222 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6223 IFN_VA_ARG, type,
6224 nargs + 1, ap, tag,
6225 aptag, vlasize);
6226 TREE_OPERAND (*from_p, 0) = newcall;
6230 /* Now see if the above changed *from_p to something we handle specially. */
6231 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6232 want_value);
6233 if (ret != GS_UNHANDLED)
6234 return ret;
6236 /* If we've got a variable sized assignment between two lvalues (i.e. one
6237 that does not involve a call), then we can make things a bit more
6238 straightforward by converting the assignment to memcpy or memset. */
6239 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6241 tree from = TREE_OPERAND (*from_p, 0);
6242 tree size = TREE_OPERAND (*from_p, 1);
6244 if (TREE_CODE (from) == CONSTRUCTOR)
6245 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6247 if (is_gimple_addressable (from))
6249 *from_p = from;
6250 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6251 pre_p);
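/* Sketch: a variable-sized copy between two lvalues, for instance
   between two objects of VLA type whose size SZ is only known at
   runtime,

     *p = *q;

   is turned into the equivalent of

     __builtin_memcpy (p, q, SZ);

   since a GIMPLE assignment cannot carry a runtime size itself.  */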
6255 /* Transform partial stores to non-addressable complex variables into
6256 total stores. This allows us to use real instead of virtual operands
6257 for these variables, which improves optimization. */
6258 if ((TREE_CODE (*to_p) == REALPART_EXPR
6259 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6260 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6261 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6263 /* Try to alleviate the effects of the gimplification creating artificial
6264 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6265 make sure not to create DECL_DEBUG_EXPR links across functions. */
6266 if (!gimplify_ctxp->into_ssa
6267 && VAR_P (*from_p)
6268 && DECL_IGNORED_P (*from_p)
6269 && DECL_P (*to_p)
6270 && !DECL_IGNORED_P (*to_p)
6271 && decl_function_context (*to_p) == current_function_decl
6272 && decl_function_context (*from_p) == current_function_decl)
6274 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6275 DECL_NAME (*from_p)
6276 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6277 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6278 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6281 if (want_value && TREE_THIS_VOLATILE (*to_p))
6282 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6284 if (TREE_CODE (*from_p) == CALL_EXPR)
6286 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6287 instead of a GIMPLE_ASSIGN. */
6288 gcall *call_stmt;
6289 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6291 /* Gimplify internal functions created in the FEs. */
6292 int nargs = call_expr_nargs (*from_p), i;
6293 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6294 auto_vec<tree> vargs (nargs);
6296 for (i = 0; i < nargs; i++)
6298 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6299 EXPR_LOCATION (*from_p));
6300 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6302 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6303 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6304 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6306 else
6308 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6309 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6310 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6311 tree fndecl = get_callee_fndecl (*from_p);
6312 if (fndecl
6313 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6314 && call_expr_nargs (*from_p) == 3)
6315 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6316 CALL_EXPR_ARG (*from_p, 0),
6317 CALL_EXPR_ARG (*from_p, 1),
6318 CALL_EXPR_ARG (*from_p, 2));
6319 else
6321 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6324 notice_special_calls (call_stmt);
6325 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6326 gimple_call_set_lhs (call_stmt, *to_p);
6327 else if (TREE_CODE (*to_p) == SSA_NAME)
6328 /* The above is somewhat premature; avoid ICEing later for an
6329 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6330 ??? This doesn't make it a default-def. */
6331 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6333 assign = call_stmt;
6335 else
6337 assign = gimple_build_assign (*to_p, *from_p);
6338 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6339 if (COMPARISON_CLASS_P (*from_p))
6340 copy_warning (assign, *from_p);
6343 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6345 /* We should have got an SSA name from the start. */
6346 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6347 || ! gimple_in_ssa_p (cfun));
6350 gimplify_seq_add_stmt (pre_p, assign);
6351 gsi = gsi_last (*pre_p);
6352 maybe_fold_stmt (&gsi);
6354 if (want_value)
6356 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6357 return GS_OK;
6359 else
6360 *expr_p = NULL;
6362 return GS_ALL_DONE;
6365 /* Gimplify a comparison between two variable-sized objects. Do this
6366 with a call to BUILT_IN_MEMCMP. */
6368 static enum gimplify_status
6369 gimplify_variable_sized_compare (tree *expr_p)
6371 location_t loc = EXPR_LOCATION (*expr_p);
6372 tree op0 = TREE_OPERAND (*expr_p, 0);
6373 tree op1 = TREE_OPERAND (*expr_p, 1);
6374 tree t, arg, dest, src, expr;
6376 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6377 arg = unshare_expr (arg);
6378 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6379 src = build_fold_addr_expr_loc (loc, op1);
6380 dest = build_fold_addr_expr_loc (loc, op0);
6381 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6382 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6384 expr
6385 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6386 SET_EXPR_LOCATION (expr, loc);
6387 *expr_p = expr;
6389 return GS_OK;
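/* Example of the rewrite above (sketch only): comparing two objects
   of variable size,

     if (a == b) ...

   becomes a size computation plus a library call,

     if (__builtin_memcmp (&a, &b, SZ) == 0) ...

   where SZ is TYPE_SIZE_UNIT of the type with any PLACEHOLDER_EXPRs
   substituted from "a".  */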
6392 /* Gimplify a comparison between two aggregate objects of integral scalar
6393 mode as a comparison between the bitwise equivalent scalar values. */
6395 static enum gimplify_status
6396 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6398 location_t loc = EXPR_LOCATION (*expr_p);
6399 tree op0 = TREE_OPERAND (*expr_p, 0);
6400 tree op1 = TREE_OPERAND (*expr_p, 1);
6402 tree type = TREE_TYPE (op0);
6403 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6405 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6406 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6408 *expr_p
6409 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6411 return GS_OK;
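/* Sketch (notation only; aggregate equality is not C, this path
   serves front ends that have it): a 4-byte record of integral
   SImode such as

     struct P { short x, y; } a, b;

   has "a == b" lowered to a comparison of the bitwise-equivalent
   scalars

     VIEW_CONVERT_EXPR <int> (a) == VIEW_CONVERT_EXPR <int> (b)

   using whatever integer type the language hook returns for the
   mode.  */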
6414 /* Gimplify an expression sequence. This function gimplifies each
6415 expression and rewrites the original expression with the last
6416 expression of the sequence in GIMPLE form.
6418 PRE_P points to the list where the side effects for all the
6419 expressions in the sequence will be emitted.
6421 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6423 static enum gimplify_status
6424 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6426 tree t = *expr_p;
6430 tree *sub_p = &TREE_OPERAND (t, 0);
6432 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6433 gimplify_compound_expr (sub_p, pre_p, false);
6434 else
6435 gimplify_stmt (sub_p, pre_p);
6437 t = TREE_OPERAND (t, 1);
6439 while (TREE_CODE (t) == COMPOUND_EXPR);
6441 *expr_p = t;
6442 if (want_value)
6443 return GS_OK;
6444 else
6446 gimplify_stmt (expr_p, pre_p);
6447 return GS_ALL_DONE;
6451 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6452 gimplify. After gimplification, EXPR_P will point to a new temporary
6453 that holds the original value of the SAVE_EXPR node.
6455 PRE_P points to the list where side effects that must happen before
6456 *EXPR_P should be stored. */
6458 static enum gimplify_status
6459 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6461 enum gimplify_status ret = GS_ALL_DONE;
6462 tree val;
6464 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6465 val = TREE_OPERAND (*expr_p, 0);
6467 if (val && TREE_TYPE (val) == error_mark_node)
6468 return GS_ERROR;
6470 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6471 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6473 /* The operand may be a void-valued expression. It is
6474 being executed only for its side-effects. */
6475 if (TREE_TYPE (val) == void_type_node)
6477 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6478 is_gimple_stmt, fb_none);
6479 val = NULL;
6481 else
6482 /* The temporary may not be an SSA name as later abnormal and EH
6483 control flow may invalidate use/def domination. When in SSA
6484 form then assume there are no such issues and SAVE_EXPRs only
6485 appear via GENERIC foldings. */
6486 val = get_initialized_tmp_var (val, pre_p, post_p,
6487 gimple_in_ssa_p (cfun));
6489 TREE_OPERAND (*expr_p, 0) = val;
6490 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6493 *expr_p = val;
6495 return ret;
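/* Illustrative sketch (names invented): a shared SAVE_EXPR node
   guarantees single evaluation, so GENERIC of the shape

     SAVE_EXPR <f (n)> + SAVE_EXPR <f (n)>    (one node, shared)

   gimplifies to a single

     tmp = f (n);

   after which every reference to the SAVE_EXPR resolves to "tmp".  */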
6498 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6500 unary_expr
6501 : ...
6502 | '&' varname
6505 PRE_P points to the list where side effects that must happen before
6506 *EXPR_P should be stored.
6508 POST_P points to the list where side effects that must happen after
6509 *EXPR_P should be stored. */
6511 static enum gimplify_status
6512 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6514 tree expr = *expr_p;
6515 tree op0 = TREE_OPERAND (expr, 0);
6516 enum gimplify_status ret;
6517 location_t loc = EXPR_LOCATION (*expr_p);
6519 switch (TREE_CODE (op0))
6521 case INDIRECT_REF:
6522 do_indirect_ref:
6523 /* Check if we are dealing with an expression of the form '&*ptr'.
6524 While the front end folds away '&*ptr' into 'ptr', these
6525 expressions may be generated internally by the compiler (e.g.,
6526 builtins like __builtin_va_end). */
6527 /* Caution: the silent array decomposition semantics we allow for
6528 ADDR_EXPR means we can't always discard the pair. */
6529 /* Gimplification of the ADDR_EXPR operand may drop
6530 cv-qualification conversions, so make sure we add them if
6531 needed. */
6533 tree op00 = TREE_OPERAND (op0, 0);
6534 tree t_expr = TREE_TYPE (expr);
6535 tree t_op00 = TREE_TYPE (op00);
6537 if (!useless_type_conversion_p (t_expr, t_op00))
6538 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6539 *expr_p = op00;
6540 ret = GS_OK;
6542 break;
6544 case VIEW_CONVERT_EXPR:
6545 /* Take the address of our operand and then convert it to the type of
6546 this ADDR_EXPR.
6548 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6549 all clear. The impact of this transformation is even less clear. */
6551 /* If the operand is a useless conversion, look through it. Doing so
6552 guarantees that the ADDR_EXPR and its operand will remain of the
6553 same type. */
6554 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6555 op0 = TREE_OPERAND (op0, 0);
6557 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6558 build_fold_addr_expr_loc (loc,
6559 TREE_OPERAND (op0, 0)));
6560 ret = GS_OK;
6561 break;
6563 case MEM_REF:
6564 if (integer_zerop (TREE_OPERAND (op0, 1)))
6565 goto do_indirect_ref;
6567 /* fall through */
6569 default:
6570 /* If we see a call to a declared builtin or see its address
6571 being taken (we can unify those cases here) then we can mark
6572 the builtin for implicit generation by GCC. */
6573 if (TREE_CODE (op0) == FUNCTION_DECL
6574 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6575 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6576 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6578 /* We use fb_either here because the C frontend sometimes takes
6579 the address of a call that returns a struct; see
6580 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6581 the implied temporary explicit. */
6583 /* Make the operand addressable. */
6584 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6585 is_gimple_addressable, fb_either);
6586 if (ret == GS_ERROR)
6587 break;
6589 /* Then mark it. Beware that it may not be possible to do so directly
6590 if a temporary has been created by the gimplification. */
6591 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6593 op0 = TREE_OPERAND (expr, 0);
6595 /* For various reasons, the gimplification of the expression
6596 may have made a new INDIRECT_REF. */
6597 if (INDIRECT_REF_P (op0)
6598 || (TREE_CODE (op0) == MEM_REF
6599 && integer_zerop (TREE_OPERAND (op0, 1))))
6600 goto do_indirect_ref;
6602 mark_addressable (TREE_OPERAND (expr, 0));
6604 /* The FEs may end up building ADDR_EXPRs early on a decl with
6605 an incomplete type. Re-build ADDR_EXPRs in canonical form
6606 here. */
6607 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6608 *expr_p = build_fold_addr_expr (op0);
6610 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6611 recompute_tree_invariant_for_addr_expr (*expr_p);
6613 /* If we re-built the ADDR_EXPR add a conversion to the original type
6614 if required. */
6615 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6616 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6618 break;
6621 return ret;
6624 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6625 value; output operands should be a gimple lvalue. */
6627 static enum gimplify_status
6628 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6630 tree expr;
6631 int noutputs;
6632 const char **oconstraints;
6633 int i;
6634 tree link;
6635 const char *constraint;
6636 bool allows_mem, allows_reg, is_inout;
6637 enum gimplify_status ret, tret;
6638 gasm *stmt;
6639 vec<tree, va_gc> *inputs;
6640 vec<tree, va_gc> *outputs;
6641 vec<tree, va_gc> *clobbers;
6642 vec<tree, va_gc> *labels;
6643 tree link_next;
6645 expr = *expr_p;
6646 noutputs = list_length (ASM_OUTPUTS (expr));
6647 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6649 inputs = NULL;
6650 outputs = NULL;
6651 clobbers = NULL;
6652 labels = NULL;
6654 ret = GS_ALL_DONE;
6655 link_next = NULL_TREE;
6656 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6658 bool ok;
6659 size_t constraint_len;
6661 link_next = TREE_CHAIN (link);
6663 oconstraints[i]
6664 = constraint
6665 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6666 constraint_len = strlen (constraint);
6667 if (constraint_len == 0)
6668 continue;
6670 ok = parse_output_constraint (&constraint, i, 0, 0,
6671 &allows_mem, &allows_reg, &is_inout);
6672 if (!ok)
6674 ret = GS_ERROR;
6675 is_inout = false;
6678 /* If we can't make copies, we can only accept memory.
6679 Similarly for VLAs. */
6680 tree outtype = TREE_TYPE (TREE_VALUE (link));
6681 if (outtype != error_mark_node
6682 && (TREE_ADDRESSABLE (outtype)
6683 || !COMPLETE_TYPE_P (outtype)
6684 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6686 if (allows_mem)
6687 allows_reg = 0;
6688 else
6690 error ("impossible constraint in %<asm%>");
6691 error ("non-memory output %d must stay in memory", i);
6692 return GS_ERROR;
6696 if (!allows_reg && allows_mem)
6697 mark_addressable (TREE_VALUE (link));
6699 tree orig = TREE_VALUE (link);
6700 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6701 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6702 fb_lvalue | fb_mayfail);
6703 if (tret == GS_ERROR)
6705 if (orig != error_mark_node)
6706 error ("invalid lvalue in %<asm%> output %d", i);
6707 ret = tret;
6710 /* If the constraint does not allow memory, make sure we gimplify
6711 it to a register if it is not already one but its base is. This
6712 happens for complex and vector components. */
6713 if (!allows_mem)
6715 tree op = TREE_VALUE (link);
6716 if (! is_gimple_val (op)
6717 && is_gimple_reg_type (TREE_TYPE (op))
6718 && is_gimple_reg (get_base_address (op)))
6720 tree tem = create_tmp_reg (TREE_TYPE (op));
6721 tree ass;
6722 if (is_inout)
6724 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6725 tem, unshare_expr (op));
6726 gimplify_and_add (ass, pre_p);
6728 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6729 gimplify_and_add (ass, post_p);
6731 TREE_VALUE (link) = tem;
6732 tret = GS_OK;
6736 vec_safe_push (outputs, link);
6737 TREE_CHAIN (link) = NULL_TREE;
6739 if (is_inout)
6741 /* An input/output operand. To give the optimizers more
6742 flexibility, split it into separate input and output
6743 operands. */
6744 tree input;
6745 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6746 char buf[11];
6748 /* Turn the in/out constraint into an output constraint. */
6749 char *p = xstrdup (constraint);
6750 p[0] = '=';
6751 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6753 /* And add a matching input constraint. */
6754 if (allows_reg)
6756 sprintf (buf, "%u", i);
6758 /* If there are multiple alternatives in the constraint,
6759 handle each of them individually. Those that allow a register
6760 will be replaced with the operand number; the others will stay
6761 unchanged. */
6762 if (strchr (p, ',') != NULL)
6764 size_t len = 0, buflen = strlen (buf);
6765 char *beg, *end, *str, *dst;
6767 for (beg = p + 1;;)
6769 end = strchr (beg, ',');
6770 if (end == NULL)
6771 end = strchr (beg, '\0');
6772 if ((size_t) (end - beg) < buflen)
6773 len += buflen + 1;
6774 else
6775 len += end - beg + 1;
6776 if (*end)
6777 beg = end + 1;
6778 else
6779 break;
6782 str = (char *) alloca (len);
6783 for (beg = p + 1, dst = str;;)
6785 const char *tem;
6786 bool mem_p, reg_p, inout_p;
6788 end = strchr (beg, ',');
6789 if (end)
6790 *end = '\0';
6791 beg[-1] = '=';
6792 tem = beg - 1;
6793 parse_output_constraint (&tem, i, 0, 0,
6794 &mem_p, &reg_p, &inout_p);
6795 if (dst != str)
6796 *dst++ = ',';
6797 if (reg_p)
6799 memcpy (dst, buf, buflen);
6800 dst += buflen;
6802 else
6804 if (end)
6805 len = end - beg;
6806 else
6807 len = strlen (beg);
6808 memcpy (dst, beg, len);
6809 dst += len;
6811 if (end)
6812 beg = end + 1;
6813 else
6814 break;
6816 *dst = '\0';
6817 input = build_string (dst - str, str);
6819 else
6820 input = build_string (strlen (buf), buf);
6822 else
6823 input = build_string (constraint_len - 1, constraint + 1);
6825 free (p);
6827 input = build_tree_list (build_tree_list (NULL_TREE, input),
6828 unshare_expr (TREE_VALUE (link)));
6829 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
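/* Hedged example of the in/out split above: an operand written as

     asm ("incl %0" : "+r" (x));

   is rewritten into a pure output plus a matching input,

     asm ("incl %0" : "=r" (x) : "0" (x));

   and for multi-alternative constraints only the alternatives that
   allow a register are replaced by the operand number.  */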
6833 link_next = NULL_TREE;
6834 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6836 link_next = TREE_CHAIN (link);
6837 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6838 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6839 oconstraints, &allows_mem, &allows_reg);
6841 /* If we can't make copies, we can only accept memory. */
6842 tree intype = TREE_TYPE (TREE_VALUE (link));
6843 if (intype != error_mark_node
6844 && (TREE_ADDRESSABLE (intype)
6845 || !COMPLETE_TYPE_P (intype)
6846 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6848 if (allows_mem)
6849 allows_reg = 0;
6850 else
6852 error ("impossible constraint in %<asm%>");
6853 error ("non-memory input %d must stay in memory", i);
6854 return GS_ERROR;
6858 /* If the operand is a memory input, it should be an lvalue. */
6859 if (!allows_reg && allows_mem)
6861 tree inputv = TREE_VALUE (link);
6862 STRIP_NOPS (inputv);
6863 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6864 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6865 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6866 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6867 || TREE_CODE (inputv) == MODIFY_EXPR)
6868 TREE_VALUE (link) = error_mark_node;
6869 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6870 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6871 if (tret != GS_ERROR)
6873 /* Unlike output operands, memory inputs are not guaranteed
6874 to be lvalues by the FE, and while the expressions are
6875 marked addressable there, if it is e.g. a statement
6876 expression, temporaries in it might not end up being
6877 addressable. They might be already used in the IL and thus
6878 it is too late to make them addressable now though. */
6879 tree x = TREE_VALUE (link);
6880 while (handled_component_p (x))
6881 x = TREE_OPERAND (x, 0);
6882 if (TREE_CODE (x) == MEM_REF
6883 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6884 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6885 if ((VAR_P (x)
6886 || TREE_CODE (x) == PARM_DECL
6887 || TREE_CODE (x) == RESULT_DECL)
6888 && !TREE_ADDRESSABLE (x)
6889 && is_gimple_reg (x))
6891 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6892 input_location), 0,
6893 "memory input %d is not directly addressable",
6895 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6898 mark_addressable (TREE_VALUE (link));
6899 if (tret == GS_ERROR)
6901 if (inputv != error_mark_node)
6902 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6903 "memory input %d is not directly addressable", i);
6904 ret = tret;
6907 else
6909 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6910 is_gimple_asm_val, fb_rvalue);
6911 if (tret == GS_ERROR)
6912 ret = tret;
6915 TREE_CHAIN (link) = NULL_TREE;
6916 vec_safe_push (inputs, link);
6919 link_next = NULL_TREE;
6920 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6922 link_next = TREE_CHAIN (link);
6923 TREE_CHAIN (link) = NULL_TREE;
6924 vec_safe_push (clobbers, link);
6927 link_next = NULL_TREE;
6928 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6930 link_next = TREE_CHAIN (link);
6931 TREE_CHAIN (link) = NULL_TREE;
6932 vec_safe_push (labels, link);
6935 /* Do not add ASMs with errors to the gimple IL stream. */
6936 if (ret != GS_ERROR)
6938 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6939 inputs, outputs, clobbers, labels);
6941 /* asm is volatile if it was marked by the user as volatile or
6942 there are no outputs or this is an asm goto. */
6943 gimple_asm_set_volatile (stmt,
6944 ASM_VOLATILE_P (expr)
6945 || noutputs == 0
6946 || labels);
6947 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6948 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6950 gimplify_seq_add_stmt (pre_p, stmt);
6953 return ret;
6956 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6957 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6958 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6959 return to this function.
6961 FIXME should we complexify the prequeue handling instead? Or use flags
6962 for all the cleanups and let the optimizer tighten them up? The current
6963 code seems pretty fragile; it will break on a cleanup within any
6964 non-conditional nesting. But any such nesting would be broken, anyway;
6965 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6966 and continues out of it. We can do that at the RTL level, though, so
6967 having an optimizer to tighten up try/finally regions would be a Good
6968 Thing. */
6970 static enum gimplify_status
6971 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6973 gimple_stmt_iterator iter;
6974 gimple_seq body_sequence = NULL;
6976 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6978 /* We only care about the number of conditions between the innermost
6979 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6980 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6981 int old_conds = gimplify_ctxp->conditions;
6982 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6983 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6984 gimplify_ctxp->conditions = 0;
6985 gimplify_ctxp->conditional_cleanups = NULL;
6986 gimplify_ctxp->in_cleanup_point_expr = true;
6988 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6990 gimplify_ctxp->conditions = old_conds;
6991 gimplify_ctxp->conditional_cleanups = old_cleanups;
6992 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6994 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6996 gimple *wce = gsi_stmt (iter);
6998 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
7000 if (gsi_one_before_end_p (iter))
7002 /* Note that gsi_insert_seq_before and gsi_remove do not
7003 scan operands, unlike some other sequence mutators. */
7004 if (!gimple_wce_cleanup_eh_only (wce))
7005 gsi_insert_seq_before_without_update (&iter,
7006 gimple_wce_cleanup (wce),
7007 GSI_SAME_STMT);
7008 gsi_remove (&iter, true);
7009 break;
7011 else
7013 gtry *gtry;
7014 gimple_seq seq;
7015 enum gimple_try_flags kind;
7017 if (gimple_wce_cleanup_eh_only (wce))
7018 kind = GIMPLE_TRY_CATCH;
7019 else
7020 kind = GIMPLE_TRY_FINALLY;
7021 seq = gsi_split_seq_after (iter);
7023 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7024 /* Do not use gsi_replace here, as it may scan operands.
7025 We want to do a simple structural modification only. */
7026 gsi_set_stmt (&iter, gtry);
7027 iter = gsi_start (gtry->eval);
7030 else
7031 gsi_next (&iter);
7034 gimplify_seq_add_seq (pre_p, body_sequence);
7035 if (temp)
7037 *expr_p = temp;
7038 return GS_OK;
7040 else
7042 *expr_p = NULL;
7043 return GS_ALL_DONE;
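/* Sketch of what the loop above produces: a body with a cleanup

     x = f ();
     WCE <destroy (&x)>
     g (&x);

   is restructured into

     x = f ();
     try { g (&x); } finally { destroy (&x); }

   where WCE stands for GIMPLE_WITH_CLEANUP_EXPR; a cleanup that ends
   the sequence is simply spliced in place instead.  */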
7047 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7048 is the cleanup action required. EH_ONLY is true if the cleanup should
7049 only be executed if an exception is thrown, not on normal exit.
7050 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7051 only valid for clobbers. */
7053 static void
7054 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7055 bool force_uncond = false)
7057 gimple *wce;
7058 gimple_seq cleanup_stmts = NULL;
7060 /* Errors can result in improperly nested cleanups, which results in
7061 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7062 if (seen_error ())
7063 return;
7065 if (gimple_conditional_context ())
7067 /* If we're in a conditional context, this is more complex. We only
7068 want to run the cleanup if we actually ran the initialization that
7069 necessitates it, but we want to run it after the end of the
7070 conditional context. So we wrap the try/finally around the
7071 condition and use a flag to determine whether or not to actually
7072 run the destructor. Thus
7074 test ? f(A()) : 0
7076 becomes (approximately)
7078 flag = 0;
7079 try {
7080 if (test) { A::A(temp); flag = 1; val = f(temp); }
7081 else { val = 0; }
7082 } finally {
7083 if (flag) A::~A(temp);
7087 if (force_uncond)
7089 gimplify_stmt (&cleanup, &cleanup_stmts);
7090 wce = gimple_build_wce (cleanup_stmts);
7091 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7093 else
7095 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7096 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7097 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7099 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7100 gimplify_stmt (&cleanup, &cleanup_stmts);
7101 wce = gimple_build_wce (cleanup_stmts);
7102 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7104 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7105 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7106 gimplify_seq_add_stmt (pre_p, ftrue);
7108 /* Because of this manipulation, and the EH edges that jump
7109 threading cannot redirect, the temporary (VAR) will appear
7110 to be used uninitialized. Don't warn. */
7111 suppress_warning (var, OPT_Wuninitialized);
7114 else
7116 gimplify_stmt (&cleanup, &cleanup_stmts);
7117 wce = gimple_build_wce (cleanup_stmts);
7118 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7119 gimplify_seq_add_stmt (pre_p, wce);
7123 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7125 static enum gimplify_status
7126 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7128 tree targ = *expr_p;
7129 tree temp = TARGET_EXPR_SLOT (targ);
7130 tree init = TARGET_EXPR_INITIAL (targ);
7131 enum gimplify_status ret;
7133 bool unpoison_empty_seq = false;
7134 gimple_stmt_iterator unpoison_it;
7136 if (init)
7138 gimple_seq init_pre_p = NULL;
7140 /* TARGET_EXPR temps aren't part of the enclosing block, so add them
7141 to the temps list. Handle also variable length TARGET_EXPRs. */
7142 if (!poly_int_tree_p (DECL_SIZE (temp)))
7144 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7145 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7146 /* FIXME: this is correct only when the size of the type does
7147 not depend on expressions evaluated in init. */
7148 gimplify_vla_decl (temp, &init_pre_p);
7150 else
7152 /* Save location where we need to place unpoisoning. It's possible
7153 that a variable will be converted to needs_to_live_in_memory. */
7154 unpoison_it = gsi_last (*pre_p);
7155 unpoison_empty_seq = gsi_end_p (unpoison_it);
7157 gimple_add_tmp_var (temp);
7160 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7161 expression is supposed to initialize the slot. */
7162 if (VOID_TYPE_P (TREE_TYPE (init)))
7163 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7164 fb_none);
7165 else
7167 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7168 init = init_expr;
7169 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7170 fb_none);
7171 init = NULL;
7172 ggc_free (init_expr);
7174 if (ret == GS_ERROR)
7176 /* PR c++/28266 Make sure this is expanded only once. */
7177 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7178 return GS_ERROR;
7181 if (init)
7182 gimplify_and_add (init, &init_pre_p);
7184 /* Add a clobber for the temporary going out of scope, like
7185 gimplify_bind_expr. But only if we did not promote the
7186 temporary to static storage. */
7187 if (gimplify_ctxp->in_cleanup_point_expr
7188 && !TREE_STATIC (temp)
7189 && needs_to_live_in_memory (temp))
7191 if (flag_stack_reuse == SR_ALL)
7193 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7194 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7195 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7197 if (asan_poisoned_variables
7198 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7199 && !TREE_STATIC (temp)
7200 && dbg_cnt (asan_use_after_scope)
7201 && !gimplify_omp_ctxp)
7203 tree asan_cleanup = build_asan_poison_call_expr (temp);
7204 if (asan_cleanup)
7206 if (unpoison_empty_seq)
7207 unpoison_it = gsi_start (*pre_p);
7209 asan_poison_variable (temp, false, &unpoison_it,
7210 unpoison_empty_seq);
7211 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7216 gimple_seq_add_seq (pre_p, init_pre_p);
7218 /* If needed, push the cleanup for the temp. */
7219 if (TARGET_EXPR_CLEANUP (targ))
7220 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7221 CLEANUP_EH_ONLY (targ), pre_p);
7223 /* Only expand this once. */
7224 TREE_OPERAND (targ, 3) = init;
7225 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7227 else
7228 /* We should have expanded this before. */
7229 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7231 *expr_p = temp;
7232 return GS_OK;
7235 /* Gimplification of expression trees. */
7237 /* Gimplify an expression which appears at statement context. The
7238 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7239 NULL, a new sequence is allocated.
7241 Return true if we actually added a statement to the queue. */
7243 bool
7244 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7246 gimple_seq_node last;
7248 last = gimple_seq_last (*seq_p);
7249 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7250 return last != gimple_seq_last (*seq_p);
7253 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
7254 to CTX. If entries already exist, force them to be some flavor of private.
7255 If there is no enclosing parallel, do nothing. */
7257 void
7258 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7260 splay_tree_node n;
7262 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7263 return;
7267 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7268 if (n != NULL)
7270 if (n->value & GOVD_SHARED)
7271 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7272 else if (n->value & GOVD_MAP)
7273 n->value |= GOVD_MAP_TO_ONLY;
7274 else
7275 return;
7277 else if ((ctx->region_type & ORT_TARGET) != 0)
7279 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7280 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7281 else
7282 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7284 else if (ctx->region_type != ORT_WORKSHARE
7285 && ctx->region_type != ORT_TASKGROUP
7286 && ctx->region_type != ORT_SIMD
7287 && ctx->region_type != ORT_ACC
7288 && !(ctx->region_type & ORT_TARGET_DATA))
7289 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7291 ctx = ctx->outer_context;
7293 while (ctx);
7296 /* Similarly for each of the type sizes of TYPE. */
7298 static void
7299 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7301 if (type == NULL || type == error_mark_node)
7302 return;
7303 type = TYPE_MAIN_VARIANT (type);
7305 if (ctx->privatized_types->add (type))
7306 return;
7308 switch (TREE_CODE (type))
7310 case INTEGER_TYPE:
7311 case ENUMERAL_TYPE:
7312 case BOOLEAN_TYPE:
7313 case REAL_TYPE:
7314 case FIXED_POINT_TYPE:
7315 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7316 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7317 break;
7319 case ARRAY_TYPE:
7320 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7321 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7322 break;
7324 case RECORD_TYPE:
7325 case UNION_TYPE:
7326 case QUAL_UNION_TYPE:
7328 tree field;
7329 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7330 if (TREE_CODE (field) == FIELD_DECL)
7332 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7333 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7336 break;
7338 case POINTER_TYPE:
7339 case REFERENCE_TYPE:
7340 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7341 break;
7343 default:
7344 break;
7347 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7348 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7349 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
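
For intuition, this is the kind of user code whose type sizes the walk above picks up (an illustrative example, not taken from this file): for a variable-length array, TYPE_DOMAIN and TYPE_SIZE refer to gimplified size temporaries, and those temporaries are what end up firstprivatized.

    void
    example_vla (int n)
    {
      int a[n];               /* TYPE_SIZE of the array type is not constant.  */
    #pragma omp parallel private (a)
      a[0] = 0;               /* the size temporaries for 'a' must be made
                                 firstprivate in the parallel region.  */
    }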
7352 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7354 static void
7355 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7357 splay_tree_node n;
7358 unsigned int nflags;
7359 tree t;
7361 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7362 return;
7364 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7365 there are constructors involved somewhere. The exception is a shared
7366 clause: nothing is privatized in that case. */
7367 if ((flags & GOVD_SHARED) == 0
7368 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7369 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7370 flags |= GOVD_SEEN;
7372 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7373 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7375 /* We shouldn't be re-adding the decl with the same data
7376 sharing class. */
7377 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7378 nflags = n->value | flags;
7379 /* The only combination of data sharing classes we should see is
7380 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7381 reduction variables to be used in data sharing clauses. */
7382 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7383 || ((nflags & GOVD_DATA_SHARE_CLASS)
7384 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7385 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7386 n->value = nflags;
7387 return;
7390 /* When adding a variable-sized variable, we have to handle all sorts
7391 of additional bits of data: the pointer replacement variable, and
7392 the parameters of the type. */
7393 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7395 /* Add the pointer replacement variable as PRIVATE if the variable
7396 replacement is private, else FIRSTPRIVATE since we'll need the
7397 address of the original variable either for SHARED, or for the
7398 copy into or out of the context. */
7399 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7401 if (flags & GOVD_MAP)
7402 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7403 else if (flags & GOVD_PRIVATE)
7404 nflags = GOVD_PRIVATE;
7405 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7406 && (flags & GOVD_FIRSTPRIVATE))
7407 || (ctx->region_type == ORT_TARGET_DATA
7408 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7409 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7410 else
7411 nflags = GOVD_FIRSTPRIVATE;
7412 nflags |= flags & GOVD_SEEN;
7413 t = DECL_VALUE_EXPR (decl);
7414 gcc_assert (INDIRECT_REF_P (t));
7415 t = TREE_OPERAND (t, 0);
7416 gcc_assert (DECL_P (t));
7417 omp_add_variable (ctx, t, nflags);
7420 /* Add all of the variable and type parameters (which should have
7421 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7422 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7423 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7424 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7426 /* The variable-sized variable itself is never SHARED, only some form
7427 of PRIVATE. The sharing would take place via the pointer variable
7428 which we remapped above. */
7429 if (flags & GOVD_SHARED)
7430 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7431 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7433 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7434 alloca statement we generate for the variable, so make sure it
7435 is available. This isn't automatically needed for the SHARED
7436 case, since we won't be allocating local storage then.
7437 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7438 in that case omp_notice_variable will be called later
7439 on when it is gimplified. */
7440 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7441 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7442 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7444 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7445 && omp_privatize_by_reference (decl))
7447 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7449 /* Similarly to the direct variable-sized case above, we'll need the
7450 size of references being privatized. */
7451 if ((flags & GOVD_SHARED) == 0)
7453 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7454 if (t && DECL_P (t))
7455 omp_notice_variable (ctx, t, true);
7459 if (n != NULL)
7460 n->value |= flags;
7461 else
7462 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7464 /* For reductions clauses in OpenACC loop directives, by default create a
7465 copy clause on the enclosing parallel construct for carrying back the
7466 results. */
7467 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7469 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7470 while (outer_ctx)
7472 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7473 if (n != NULL)
7475 /* Ignore local variables and explicitly declared clauses. */
7476 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7477 break;
7478 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7480 /* According to the OpenACC spec, such a reduction variable
7481 should already have a copy map on a kernels construct,
7482 verify that here. */
7483 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7484 && (n->value & GOVD_MAP));
7486 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7488 /* Remove firstprivate and make it a copy map. */
7489 n->value &= ~GOVD_FIRSTPRIVATE;
7490 n->value |= GOVD_MAP;
7493 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7495 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7496 GOVD_MAP | GOVD_SEEN);
7497 break;
7499 outer_ctx = outer_ctx->outer_context;
7504 /* Notice a threadprivate variable DECL used in OMP context CTX.
7505 This just prints out diagnostics about threadprivate variable uses
7506 in untied tasks and target regions. If DECL2 is non-NULL, prevent this warning
7507 on that variable. */
7509 static bool
7510 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7511 tree decl2)
7513 splay_tree_node n;
7514 struct gimplify_omp_ctx *octx;
7516 for (octx = ctx; octx; octx = octx->outer_context)
7517 if ((octx->region_type & ORT_TARGET) != 0
7518 || octx->order_concurrent)
7520 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7521 if (n == NULL)
7523 if (octx->order_concurrent)
7525 error ("threadprivate variable %qE used in a region with"
7526 " %<order(concurrent)%> clause", DECL_NAME (decl));
7527 inform (octx->location, "enclosing region");
7529 else
7531 error ("threadprivate variable %qE used in target region",
7532 DECL_NAME (decl));
7533 inform (octx->location, "enclosing target region");
7535 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7537 if (decl2)
7538 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7541 if (ctx->region_type != ORT_UNTIED_TASK)
7542 return false;
7543 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7544 if (n == NULL)
7546 error ("threadprivate variable %qE used in untied task",
7547 DECL_NAME (decl));
7548 inform (ctx->location, "enclosing task");
7549 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7551 if (decl2)
7552 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7553 return false;
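
A user-level example that reaches the target-region diagnostic above (illustrative only; the error text is the one emitted by this function):

    int tp;
    #pragma omp threadprivate (tp)

    void
    example_tp_use (void)
    {
    #pragma omp target    /* error: threadprivate variable 'tp' used in
                             target region */
      tp++;
    }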
7556 /* Return true if global var DECL is device resident. */
7558 static bool
7559 device_resident_p (tree decl)
7561 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7563 if (!attr)
7564 return false;
7566 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7568 tree c = TREE_VALUE (t);
7569 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7570 return true;
7573 return false;
7576 /* Return true if DECL has an ACC DECLARE attribute. */
7578 static bool
7579 is_oacc_declared (tree decl)
7581 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7582 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7583 return declared != NULL_TREE;
7586 /* Determine outer default flags for DECL mentioned in an OMP region
7587 but not declared in an enclosing clause.
7589 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7590 remapped firstprivate instead of shared. To some extent this is
7591 addressed in omp_firstprivatize_type_sizes, but not
7592 effectively. */
7594 static unsigned
7595 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7596 bool in_code, unsigned flags)
7598 enum omp_clause_default_kind default_kind = ctx->default_kind;
7599 enum omp_clause_default_kind kind;
7601 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7602 if (ctx->region_type & ORT_TASK)
7604 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7606 /* The event-handle specified by a detach clause should always be firstprivate,
7607 regardless of the current default. */
7608 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7609 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7611 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7612 default_kind = kind;
7613 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7614 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7615 /* For C/C++ default({,first}private), variables with static storage duration
7616 declared in a namespace or global scope and referenced in the construct
7617 must be explicitly specified, i.e. this acts as default(none). */
7618 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7619 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7620 && VAR_P (decl)
7621 && is_global_var (decl)
7622 && (DECL_FILE_SCOPE_P (decl)
7623 || (DECL_CONTEXT (decl)
7624 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7625 && !lang_GNU_Fortran ())
7626 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7628 switch (default_kind)
7630 case OMP_CLAUSE_DEFAULT_NONE:
7632 const char *rtype;
7634 if (ctx->region_type & ORT_PARALLEL)
7635 rtype = "parallel";
7636 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7637 rtype = "taskloop";
7638 else if (ctx->region_type & ORT_TASK)
7639 rtype = "task";
7640 else if (ctx->region_type & ORT_TEAMS)
7641 rtype = "teams";
7642 else
7643 gcc_unreachable ();
7645 error ("%qE not specified in enclosing %qs",
7646 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7647 inform (ctx->location, "enclosing %qs", rtype);
7649 /* FALLTHRU */
7650 case OMP_CLAUSE_DEFAULT_SHARED:
7651 flags |= GOVD_SHARED;
7652 break;
7653 case OMP_CLAUSE_DEFAULT_PRIVATE:
7654 flags |= GOVD_PRIVATE;
7655 break;
7656 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7657 flags |= GOVD_FIRSTPRIVATE;
7658 break;
7659 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7660 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7661 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7662 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7664 omp_notice_variable (octx, decl, in_code);
7665 for (; octx; octx = octx->outer_context)
7667 splay_tree_node n2;
7669 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7670 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7671 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7672 continue;
7673 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7675 flags |= GOVD_FIRSTPRIVATE;
7676 goto found_outer;
7678 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7680 flags |= GOVD_SHARED;
7681 goto found_outer;
7686 if (TREE_CODE (decl) == PARM_DECL
7687 || (!is_global_var (decl)
7688 && DECL_CONTEXT (decl) == current_function_decl))
7689 flags |= GOVD_FIRSTPRIVATE;
7690 else
7691 flags |= GOVD_SHARED;
7692 found_outer:
7693 break;
7695 default:
7696 gcc_unreachable ();
7699 return flags;
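
A user-level example of the default(none) branch above (illustrative only):

    void
    example_default_none (void)
    {
      int x = 0;
    #pragma omp parallel default (none)   /* error: 'x' not specified in
                                             enclosing 'parallel' */
      x++;
    }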
7703 /* Determine outer default flags for DECL mentioned in an OACC region
7704 but not declared in an enclosing clause. */
7706 static unsigned
7707 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7709 const char *rkind;
7710 bool on_device = false;
7711 bool is_private = false;
7712 bool declared = is_oacc_declared (decl);
7713 tree type = TREE_TYPE (decl);
7715 if (omp_privatize_by_reference (decl))
7716 type = TREE_TYPE (type);
7718 /* For Fortran COMMON blocks, only the used variables in those blocks are
7719 transferred and remapped. The block itself will have a private clause to
7720 avoid transferring the data twice.
7721 The hook evaluates to false by default. For a variable in Fortran's COMMON
7722 or EQUIVALENCE block, it returns 'true' (as we have shared=false), since
7723 only the variables in such a COMMON/EQUIVALENCE block shall be privatized,
7724 not the whole block. For C++ and Fortran, it can also be true under
7725 certain other conditions, if DECL_HAS_VALUE_EXPR. */
7726 if (RECORD_OR_UNION_TYPE_P (type))
7727 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7729 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7730 && is_global_var (decl)
7731 && device_resident_p (decl)
7732 && !is_private)
7734 on_device = true;
7735 flags |= GOVD_MAP_TO_ONLY;
7738 switch (ctx->region_type)
7740 case ORT_ACC_KERNELS:
7741 rkind = "kernels";
7743 if (is_private)
7744 flags |= GOVD_FIRSTPRIVATE;
7745 else if (AGGREGATE_TYPE_P (type))
7747 /* Aggregates default to 'present_or_copy', or 'present'. */
7748 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7749 flags |= GOVD_MAP;
7750 else
7751 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7753 else
7754 /* Scalars default to 'copy'. */
7755 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7757 break;
7759 case ORT_ACC_PARALLEL:
7760 case ORT_ACC_SERIAL:
7761 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7763 if (is_private)
7764 flags |= GOVD_FIRSTPRIVATE;
7765 else if (on_device || declared)
7766 flags |= GOVD_MAP;
7767 else if (AGGREGATE_TYPE_P (type))
7769 /* Aggregates default to 'present_or_copy', or 'present'. */
7770 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7771 flags |= GOVD_MAP;
7772 else
7773 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7775 else
7776 /* Scalars default to 'firstprivate'. */
7777 flags |= GOVD_FIRSTPRIVATE;
7779 break;
7781 default:
7782 gcc_unreachable ();
7785 if (DECL_ARTIFICIAL (decl))
7786 ; /* We can get compiler-generated decls, and should not complain
7787 about them. */
7788 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7790 error ("%qE not specified in enclosing OpenACC %qs construct",
7791 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7792 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7794 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7795 ; /* Handled above. */
7796 else
7797 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7799 return flags;
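
Sketch of the OpenACC defaults this function implements, seen from the user's side (illustrative only): on a parallel construct a scalar defaults to firstprivate, while an aggregate without a data clause would default to a map.

    void
    example_acc_defaults (float *a, int n)
    {
      float s = 1.0f;
    #pragma acc parallel loop copy (a[0:n])   /* scalar 's' defaults to
                                                 firstprivate here */
      for (int i = 0; i < n; i++)
        a[i] += s;
    }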
7802 /* Record the fact that DECL was used within the OMP context CTX.
7803 IN_CODE is true when real code uses DECL, and false when we should
7804 merely emit default(none) errors. Return true if DECL is going to
7805 be remapped and thus DECL shouldn't be gimplified into its
7806 DECL_VALUE_EXPR (if any). */
7808 static bool
7809 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7811 splay_tree_node n;
7812 unsigned flags = in_code ? GOVD_SEEN : 0;
7813 bool ret = false, shared;
7815 if (error_operand_p (decl))
7816 return false;
7818 if (ctx->region_type == ORT_NONE)
7819 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7821 if (is_global_var (decl))
7823 /* Threadprivate variables are predetermined. */
7824 if (DECL_THREAD_LOCAL_P (decl))
7825 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7827 if (DECL_HAS_VALUE_EXPR_P (decl))
7829 if (ctx->region_type & ORT_ACC)
7830 /* For OpenACC, defer expansion of value to avoid transferring
7831 privatized common block data instead of im-/explicitly transferred
7832 variables which are in common blocks. */
7834 else
7836 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7838 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7839 return omp_notice_threadprivate_variable (ctx, decl, value);
7843 if (gimplify_omp_ctxp->outer_context == NULL
7844 && VAR_P (decl)
7845 && oacc_get_fn_attrib (current_function_decl))
7847 location_t loc = DECL_SOURCE_LOCATION (decl);
7849 if (lookup_attribute ("omp declare target link",
7850 DECL_ATTRIBUTES (decl)))
7852 error_at (loc,
7853 "%qE with %<link%> clause used in %<routine%> function",
7854 DECL_NAME (decl));
7855 return false;
7857 else if (!lookup_attribute ("omp declare target",
7858 DECL_ATTRIBUTES (decl)))
7860 error_at (loc,
7861 "%qE requires a %<declare%> directive for use "
7862 "in a %<routine%> function", DECL_NAME (decl));
7863 return false;
7868 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7869 if ((ctx->region_type & ORT_TARGET) != 0)
7871 if (ctx->region_type & ORT_ACC)
7872 /* For OpenACC, as remarked above, defer expansion. */
7873 shared = false;
7874 else
7875 shared = true;
7877 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7878 if (n == NULL)
7880 unsigned nflags = flags;
7881 if ((ctx->region_type & ORT_ACC) == 0)
7883 bool is_declare_target = false;
7884 if (is_global_var (decl)
7885 && varpool_node::get_create (decl)->offloadable)
7887 struct gimplify_omp_ctx *octx;
7888 for (octx = ctx->outer_context;
7889 octx; octx = octx->outer_context)
7891 n = splay_tree_lookup (octx->variables,
7892 (splay_tree_key)decl);
7893 if (n
7894 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7895 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7896 break;
7898 is_declare_target = octx == NULL;
7900 if (!is_declare_target)
7902 int gdmk;
7903 enum omp_clause_defaultmap_kind kind;
7904 if (lang_hooks.decls.omp_allocatable_p (decl))
7905 gdmk = GDMK_ALLOCATABLE;
7906 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7907 gdmk = GDMK_SCALAR_TARGET;
7908 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7909 gdmk = GDMK_SCALAR;
7910 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7911 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7912 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7913 == POINTER_TYPE)))
7914 gdmk = GDMK_POINTER;
7915 else
7916 gdmk = GDMK_AGGREGATE;
7917 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7918 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7920 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7921 nflags |= GOVD_FIRSTPRIVATE;
7922 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7923 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7924 else
7925 gcc_unreachable ();
7927 else if (ctx->defaultmap[gdmk] == 0)
7929 tree d = lang_hooks.decls.omp_report_decl (decl);
7930 error ("%qE not specified in enclosing %<target%>",
7931 DECL_NAME (d));
7932 inform (ctx->location, "enclosing %<target%>");
7934 else if (ctx->defaultmap[gdmk]
7935 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7936 nflags |= ctx->defaultmap[gdmk];
7937 else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
7939 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7940 nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
7942 else
7944 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7945 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7950 struct gimplify_omp_ctx *octx = ctx->outer_context;
7951 if ((ctx->region_type & ORT_ACC) && octx)
7953 /* Look in outer OpenACC contexts, to see if there's a
7954 data attribute for this variable. */
7955 omp_notice_variable (octx, decl, in_code);
7957 for (; octx; octx = octx->outer_context)
7959 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7960 break;
7961 splay_tree_node n2
7962 = splay_tree_lookup (octx->variables,
7963 (splay_tree_key) decl);
7964 if (n2)
7966 if (octx->region_type == ORT_ACC_HOST_DATA)
7967 error ("variable %qE declared in enclosing "
7968 "%<host_data%> region", DECL_NAME (decl));
7969 nflags |= GOVD_MAP;
7970 if (octx->region_type == ORT_ACC_DATA
7971 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7972 nflags |= GOVD_MAP_0LEN_ARRAY;
7973 goto found_outer;
7978 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7979 | GOVD_MAP_ALLOC_ONLY)) == flags)
7981 tree type = TREE_TYPE (decl);
7983 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7984 && omp_privatize_by_reference (decl))
7985 type = TREE_TYPE (type);
7986 if (!omp_mappable_type (type))
7988 error ("%qD referenced in target region does not have "
7989 "a mappable type", decl);
7990 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7992 else
7994 if ((ctx->region_type & ORT_ACC) != 0)
7995 nflags = oacc_default_clause (ctx, decl, flags);
7996 else
7997 nflags |= GOVD_MAP;
8000 found_outer:
8001 omp_add_variable (ctx, decl, nflags);
8003 else
8005 /* If nothing changed, there's nothing left to do. */
8006 if ((n->value & flags) == flags)
8007 return ret;
8008 flags |= n->value;
8009 n->value = flags;
8011 goto do_outer;
8014 if (n == NULL)
8016 if (ctx->region_type == ORT_WORKSHARE
8017 || ctx->region_type == ORT_TASKGROUP
8018 || ctx->region_type == ORT_SIMD
8019 || ctx->region_type == ORT_ACC
8020 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8021 goto do_outer;
8023 flags = omp_default_clause (ctx, decl, in_code, flags);
8025 if ((flags & GOVD_PRIVATE)
8026 && lang_hooks.decls.omp_private_outer_ref (decl))
8027 flags |= GOVD_PRIVATE_OUTER_REF;
8029 omp_add_variable (ctx, decl, flags);
8031 shared = (flags & GOVD_SHARED) != 0;
8032 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8033 goto do_outer;
8036 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8037 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
8038 if (ctx->region_type == ORT_SIMD
8039 && ctx->in_for_exprs
8040 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8041 == GOVD_PRIVATE))
8042 flags &= ~GOVD_SEEN;
8044 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8045 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8046 && DECL_SIZE (decl))
8048 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8050 splay_tree_node n2;
8051 tree t = DECL_VALUE_EXPR (decl);
8052 gcc_assert (INDIRECT_REF_P (t));
8053 t = TREE_OPERAND (t, 0);
8054 gcc_assert (DECL_P (t));
8055 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8056 n2->value |= GOVD_SEEN;
8058 else if (omp_privatize_by_reference (decl)
8059 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8060 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8061 != INTEGER_CST))
8063 splay_tree_node n2;
8064 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8065 gcc_assert (DECL_P (t));
8066 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8067 if (n2)
8068 omp_notice_variable (ctx, t, true);
8072 if (ctx->region_type & ORT_ACC)
8073 /* For OpenACC, as remarked above, defer expansion. */
8074 shared = false;
8075 else
8076 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8077 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8079 /* If nothing changed, there's nothing left to do. */
8080 if ((n->value & flags) == flags)
8081 return ret;
8082 flags |= n->value;
8083 n->value = flags;
8085 do_outer:
8086 /* If the variable is private in the current context, then we don't
8087 need to propagate anything to an outer context. */
8088 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8089 return ret;
8090 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8091 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8092 return ret;
8093 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8094 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8095 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8096 return ret;
8097 if (ctx->outer_context
8098 && omp_notice_variable (ctx->outer_context, decl, in_code))
8099 return true;
8100 return ret;
8103 /* Verify that DECL is private within CTX. If there's specific information
8104 to the contrary in the innermost scope, generate an error. */
8106 static bool
8107 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8109 splay_tree_node n;
8111 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8112 if (n != NULL)
8114 if (n->value & GOVD_SHARED)
8116 if (ctx == gimplify_omp_ctxp)
8118 if (simd)
8119 error ("iteration variable %qE is predetermined linear",
8120 DECL_NAME (decl));
8121 else
8122 error ("iteration variable %qE should be private",
8123 DECL_NAME (decl));
8124 n->value = GOVD_PRIVATE;
8125 return true;
8127 else
8128 return false;
8130 else if ((n->value & GOVD_EXPLICIT) != 0
8131 && (ctx == gimplify_omp_ctxp
8132 || (ctx->region_type == ORT_COMBINED_PARALLEL
8133 && gimplify_omp_ctxp->outer_context == ctx)))
8135 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8136 error ("iteration variable %qE should not be firstprivate",
8137 DECL_NAME (decl));
8138 else if ((n->value & GOVD_REDUCTION) != 0)
8139 error ("iteration variable %qE should not be reduction",
8140 DECL_NAME (decl));
8141 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8142 error ("iteration variable %qE should not be linear",
8143 DECL_NAME (decl));
8145 return (ctx == gimplify_omp_ctxp
8146 || (ctx->region_type == ORT_COMBINED_PARALLEL
8147 && gimplify_omp_ctxp->outer_context == ctx));
8150 if (ctx->region_type != ORT_WORKSHARE
8151 && ctx->region_type != ORT_TASKGROUP
8152 && ctx->region_type != ORT_SIMD
8153 && ctx->region_type != ORT_ACC)
8154 return false;
8155 else if (ctx->outer_context)
8156 return omp_is_private (ctx->outer_context, decl, simd);
8157 return false;
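
A user-level example of one of the explicit-clause diagnostics above (illustrative only; the error text is the one emitted by this function):

    void
    example_fp_iv (int n)
    {
      int i;
    #pragma omp for firstprivate (i)   /* error: iteration variable 'i'
                                          should not be firstprivate */
      for (i = 0; i < n; i++)
        ;
    }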
8160 /* Return true if DECL is private within a parallel region
8161 that binds to the current construct's context or in parallel
8162 region's REDUCTION clause. */
8164 static bool
8165 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8167 splay_tree_node n;
8171 ctx = ctx->outer_context;
8172 if (ctx == NULL)
8174 if (is_global_var (decl))
8175 return false;
8177 /* References might be private, but might be shared too.
8178 When checking for copyprivate, assume they might be
8179 private; otherwise assume they might be shared. */
8180 if (copyprivate)
8181 return true;
8183 if (omp_privatize_by_reference (decl))
8184 return false;
8186 /* Treat C++ privatized non-static data members outside
8187 of the privatization the same. */
8188 if (omp_member_access_dummy_var (decl))
8189 return false;
8191 return true;
8194 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8196 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8197 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8199 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8200 || n == NULL
8201 || (n->value & GOVD_MAP) == 0)
8202 continue;
8203 return false;
8206 if (n != NULL)
8208 if ((n->value & GOVD_LOCAL) != 0
8209 && omp_member_access_dummy_var (decl))
8210 return false;
8211 return (n->value & GOVD_SHARED) == 0;
8214 if (ctx->region_type == ORT_WORKSHARE
8215 || ctx->region_type == ORT_TASKGROUP
8216 || ctx->region_type == ORT_SIMD
8217 || ctx->region_type == ORT_ACC)
8218 continue;
8220 break;
8222 while (1);
8223 return false;
8226 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8228 static tree
8229 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8231 tree t = *tp;
8233 /* If T is the DECL_EXPR for the decl passed in DATA, return it to stop the walk. */
8234 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8235 return t;
8237 if (IS_TYPE_OR_DECL_P (t))
8238 *walk_subtrees = 0;
8239 return NULL_TREE;
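
A sketch of how such a callback is typically driven (hypothetical wrapper, not part of this file; it assumes the standard walk_tree interface):

    /* Return true if *BODY_P contains a DECL_EXPR declaring DECL.  */
    static bool
    example_contains_decl_expr (tree *body_p, tree decl)
    {
      return walk_tree (body_p, find_decl_expr, decl, NULL) != NULL_TREE;
    }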
8243 /* Gimplify the affinity clause but effectively ignore it.
8244 Generate:
8245 var = begin;
8246 if ((step > 0) ? var <= end : var > end)
8247 locator_var_expr; */
8249 static void
8250 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8252 tree last_iter = NULL_TREE;
8253 tree last_bind = NULL_TREE;
8254 tree label = NULL_TREE;
8255 tree *last_body = NULL;
8256 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8257 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8259 tree t = OMP_CLAUSE_DECL (c);
8260 if (TREE_CODE (t) == TREE_LIST
8261 && TREE_PURPOSE (t)
8262 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8264 if (TREE_VALUE (t) == null_pointer_node)
8265 continue;
8266 if (TREE_PURPOSE (t) != last_iter)
8268 if (last_bind)
8270 append_to_statement_list (label, last_body);
8271 gimplify_and_add (last_bind, pre_p);
8272 last_bind = NULL_TREE;
8274 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8276 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8277 is_gimple_val, fb_rvalue) == GS_ERROR
8278 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8279 is_gimple_val, fb_rvalue) == GS_ERROR
8280 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8281 is_gimple_val, fb_rvalue) == GS_ERROR
8282 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8283 is_gimple_val, fb_rvalue)
8284 == GS_ERROR))
8285 return;
8287 last_iter = TREE_PURPOSE (t);
8288 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8289 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8290 NULL, block);
8291 last_body = &BIND_EXPR_BODY (last_bind);
8292 tree cond = NULL_TREE;
8293 location_t loc = OMP_CLAUSE_LOCATION (c);
8294 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8296 tree var = TREE_VEC_ELT (it, 0);
8297 tree begin = TREE_VEC_ELT (it, 1);
8298 tree end = TREE_VEC_ELT (it, 2);
8299 tree step = TREE_VEC_ELT (it, 3);
8300 loc = DECL_SOURCE_LOCATION (var);
8301 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8302 var, begin);
8303 append_to_statement_list_force (tem, last_body);
8305 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8306 step, build_zero_cst (TREE_TYPE (step)));
8307 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8308 var, end);
8309 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8310 var, end);
8311 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8312 cond1, cond2, cond3);
8313 if (cond)
8314 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8315 boolean_type_node, cond, cond1);
8316 else
8317 cond = cond1;
8319 tree cont_label = create_artificial_label (loc);
8320 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8321 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8322 void_node,
8323 build_and_jump (&cont_label));
8324 append_to_statement_list_force (tem, last_body);
8326 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8328 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8329 last_body);
8330 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8332 if (error_operand_p (TREE_VALUE (t)))
8333 return;
8334 append_to_statement_list_force (TREE_VALUE (t), last_body);
8335 TREE_VALUE (t) = null_pointer_node;
8337 else
8339 if (last_bind)
8341 append_to_statement_list (label, last_body);
8342 gimplify_and_add (last_bind, pre_p);
8343 last_bind = NULL_TREE;
8345 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8347 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8348 NULL, is_gimple_val, fb_rvalue);
8349 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8351 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8352 return;
8353 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8354 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8355 return;
8356 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8359 if (last_bind)
8361 append_to_statement_list (label, last_body);
8362 gimplify_and_add (last_bind, pre_p);
8364 return;
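
A user-level affinity clause of the iterator form that this function lowers (illustrative only):

    void
    example_affinity (int *a, int n)
    {
    #pragma omp task affinity (iterator (i = 0 : n) : a[i])
      a[0] = 1;
    }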
8367 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8368 lower all the depend clauses by populating the corresponding depend
8369 array. Returns 0 if there are no such depend clauses, 2 if all
8370 depend clauses should be removed, or 1 otherwise. */
8372 static int
8373 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8375 tree c;
8376 gimple *g;
8377 size_t n[5] = { 0, 0, 0, 0, 0 };
8378 bool unused[5];
8379 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8380 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8381 size_t i, j;
8382 location_t first_loc = UNKNOWN_LOCATION;
8384 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8385 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8387 switch (OMP_CLAUSE_DEPEND_KIND (c))
8389 case OMP_CLAUSE_DEPEND_IN:
8390 i = 2;
8391 break;
8392 case OMP_CLAUSE_DEPEND_OUT:
8393 case OMP_CLAUSE_DEPEND_INOUT:
8394 i = 0;
8395 break;
8396 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8397 i = 1;
8398 break;
8399 case OMP_CLAUSE_DEPEND_DEPOBJ:
8400 i = 3;
8401 break;
8402 case OMP_CLAUSE_DEPEND_INOUTSET:
8403 i = 4;
8404 break;
8405 default:
8406 gcc_unreachable ();
8408 tree t = OMP_CLAUSE_DECL (c);
8409 if (first_loc == UNKNOWN_LOCATION)
8410 first_loc = OMP_CLAUSE_LOCATION (c);
8411 if (TREE_CODE (t) == TREE_LIST
8412 && TREE_PURPOSE (t)
8413 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8415 if (TREE_PURPOSE (t) != last_iter)
8417 tree tcnt = size_one_node;
8418 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8420 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8421 is_gimple_val, fb_rvalue) == GS_ERROR
8422 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8423 is_gimple_val, fb_rvalue) == GS_ERROR
8424 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8425 is_gimple_val, fb_rvalue) == GS_ERROR
8426 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8427 is_gimple_val, fb_rvalue)
8428 == GS_ERROR))
8429 return 2;
8430 tree var = TREE_VEC_ELT (it, 0);
8431 tree begin = TREE_VEC_ELT (it, 1);
8432 tree end = TREE_VEC_ELT (it, 2);
8433 tree step = TREE_VEC_ELT (it, 3);
8434 tree orig_step = TREE_VEC_ELT (it, 4);
8435 tree type = TREE_TYPE (var);
8436 tree stype = TREE_TYPE (step);
8437 location_t loc = DECL_SOURCE_LOCATION (var);
8438 tree endmbegin;
8439 /* Compute count for this iterator as
8440 orig_step > 0
8441 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8442 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8443 and compute product of those for the entire depend
8444 clause. */
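                         /* Worked example with hypothetical values: for
                            begin = 0, end = 10, step = 3 and orig_step > 0,
                            the count is (10 - 0 + (3 - 1)) / 3 = 4,
                            covering the iterations 0, 3, 6 and 9.  */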
8445 if (POINTER_TYPE_P (type))
8446 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8447 stype, end, begin);
8448 else
8449 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8450 end, begin);
8451 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8452 step,
8453 build_int_cst (stype, 1));
8454 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8455 build_int_cst (stype, 1));
8456 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8457 unshare_expr (endmbegin),
8458 stepm1);
8459 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8460 pos, step);
8461 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8462 endmbegin, stepp1);
8463 if (TYPE_UNSIGNED (stype))
8465 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8466 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8468 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8469 neg, step);
8470 step = NULL_TREE;
8471 tree cond = fold_build2_loc (loc, LT_EXPR,
8472 boolean_type_node,
8473 begin, end);
8474 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8475 build_int_cst (stype, 0));
8476 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8477 end, begin);
8478 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8479 build_int_cst (stype, 0));
8480 tree osteptype = TREE_TYPE (orig_step);
8481 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8482 orig_step,
8483 build_int_cst (osteptype, 0));
8484 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8485 cond, pos, neg);
8486 cnt = fold_convert_loc (loc, sizetype, cnt);
8487 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8488 fb_rvalue) == GS_ERROR)
8489 return 2;
8490 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8492 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8493 fb_rvalue) == GS_ERROR)
8494 return 2;
8495 last_iter = TREE_PURPOSE (t);
8496 last_count = tcnt;
8498 if (counts[i] == NULL_TREE)
8499 counts[i] = last_count;
8500 else
8501 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8502 PLUS_EXPR, counts[i], last_count);
8504 else
8505 n[i]++;
8507 for (i = 0; i < 5; i++)
8508 if (counts[i])
8509 break;
8510 if (i == 5)
8511 return 0;
8513 tree total = size_zero_node;
8514 for (i = 0; i < 5; i++)
8516 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8517 if (counts[i] == NULL_TREE)
8518 counts[i] = size_zero_node;
8519 if (n[i])
8520 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8521 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8522 fb_rvalue) == GS_ERROR)
8523 return 2;
8524 total = size_binop (PLUS_EXPR, total, counts[i]);
8527 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8528 == GS_ERROR)
8529 return 2;
8530 bool is_old = unused[1] && unused[3] && unused[4];
8531 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8532 size_int (is_old ? 1 : 4));
8533 if (!unused[4])
8534 totalpx = size_binop (PLUS_EXPR, totalpx,
8535 size_binop (MULT_EXPR, counts[4], size_int (2)));
8536 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8537 tree array = create_tmp_var_raw (type);
8538 TREE_ADDRESSABLE (array) = 1;
8539 if (!poly_int_tree_p (totalpx))
8541 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8542 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8543 if (gimplify_omp_ctxp)
8545 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8546 while (ctx
8547 && (ctx->region_type == ORT_WORKSHARE
8548 || ctx->region_type == ORT_TASKGROUP
8549 || ctx->region_type == ORT_SIMD
8550 || ctx->region_type == ORT_ACC))
8551 ctx = ctx->outer_context;
8552 if (ctx)
8553 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8555 gimplify_vla_decl (array, pre_p);
8557 else
8558 gimple_add_tmp_var (array);
8559 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8560 NULL_TREE);
8561 tree tem;
8562 if (!is_old)
8564 tem = build2 (MODIFY_EXPR, void_type_node, r,
8565 build_int_cst (ptr_type_node, 0));
8566 gimplify_and_add (tem, pre_p);
8567 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8568 NULL_TREE);
8570 tem = build2 (MODIFY_EXPR, void_type_node, r,
8571 fold_convert (ptr_type_node, total));
8572 gimplify_and_add (tem, pre_p);
8573 for (i = 1; i < (is_old ? 2 : 4); i++)
8575 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8576 NULL_TREE, NULL_TREE);
8577 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8578 gimplify_and_add (tem, pre_p);
8581 tree cnts[6];
8582 for (j = 5; j; j--)
8583 if (!unused[j - 1])
8584 break;
8585 for (i = 0; i < 5; i++)
8587 if (i && (i >= j || unused[i - 1]))
8589 cnts[i] = cnts[i - 1];
8590 continue;
8592 cnts[i] = create_tmp_var (sizetype);
8593 if (i == 0)
8594 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8595 else
8597 tree t;
8598 if (is_old)
8599 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8600 else
8601 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8602 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8603 == GS_ERROR)
8604 return 2;
8605 g = gimple_build_assign (cnts[i], t);
8607 gimple_seq_add_stmt (pre_p, g);
8609 if (unused[4])
8610 cnts[5] = NULL_TREE;
8611 else
8613 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8614 cnts[5] = create_tmp_var (sizetype);
8615 g = gimple_build_assign (cnts[5], t);
8616 gimple_seq_add_stmt (pre_p, g);
8619 last_iter = NULL_TREE;
8620 tree last_bind = NULL_TREE;
8621 tree *last_body = NULL;
8622 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8623 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8625 switch (OMP_CLAUSE_DEPEND_KIND (c))
8627 case OMP_CLAUSE_DEPEND_IN:
8628 i = 2;
8629 break;
8630 case OMP_CLAUSE_DEPEND_OUT:
8631 case OMP_CLAUSE_DEPEND_INOUT:
8632 i = 0;
8633 break;
8634 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8635 i = 1;
8636 break;
8637 case OMP_CLAUSE_DEPEND_DEPOBJ:
8638 i = 3;
8639 break;
8640 case OMP_CLAUSE_DEPEND_INOUTSET:
8641 i = 4;
8642 break;
8643 default:
8644 gcc_unreachable ();
8646 tree t = OMP_CLAUSE_DECL (c);
8647 if (TREE_CODE (t) == TREE_LIST
8648 && TREE_PURPOSE (t)
8649 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8651 if (TREE_PURPOSE (t) != last_iter)
8653 if (last_bind)
8654 gimplify_and_add (last_bind, pre_p);
8655 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8656 last_bind = build3 (BIND_EXPR, void_type_node,
8657 BLOCK_VARS (block), NULL, block);
8658 TREE_SIDE_EFFECTS (last_bind) = 1;
8659 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8660 tree *p = &BIND_EXPR_BODY (last_bind);
8661 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8663 tree var = TREE_VEC_ELT (it, 0);
8664 tree begin = TREE_VEC_ELT (it, 1);
8665 tree end = TREE_VEC_ELT (it, 2);
8666 tree step = TREE_VEC_ELT (it, 3);
8667 tree orig_step = TREE_VEC_ELT (it, 4);
8668 tree type = TREE_TYPE (var);
8669 location_t loc = DECL_SOURCE_LOCATION (var);
8670 /* Emit:
8671 var = begin;
8672 goto cond_label;
8673 beg_label:
8674 ...
8675 var = var + step;
8676 cond_label:
8677 if (orig_step > 0) {
8678 if (var < end) goto beg_label;
8679 } else {
8680 if (var > end) goto beg_label;
8681 }
8682 for each iterator, with inner iterators added to
8683 the ... above. */
8684 tree beg_label = create_artificial_label (loc);
8685 tree cond_label = NULL_TREE;
8686 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8687 var, begin);
8688 append_to_statement_list_force (tem, p);
8689 tem = build_and_jump (&cond_label);
8690 append_to_statement_list_force (tem, p);
8691 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8692 append_to_statement_list (tem, p);
8693 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8694 NULL_TREE, NULL_TREE);
8695 TREE_SIDE_EFFECTS (bind) = 1;
8696 SET_EXPR_LOCATION (bind, loc);
8697 append_to_statement_list_force (bind, p);
8698 if (POINTER_TYPE_P (type))
8699 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8700 var, fold_convert_loc (loc, sizetype,
8701 step));
8702 else
8703 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8704 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8705 var, tem);
8706 append_to_statement_list_force (tem, p);
8707 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8708 append_to_statement_list (tem, p);
8709 tree cond = fold_build2_loc (loc, LT_EXPR,
8710 boolean_type_node,
8711 var, end);
8712 tree pos
8713 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8714 cond, build_and_jump (&beg_label),
8715 void_node);
8716 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8717 var, end);
8718 tree neg
8719 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8720 cond, build_and_jump (&beg_label),
8721 void_node);
8722 tree osteptype = TREE_TYPE (orig_step);
8723 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8724 orig_step,
8725 build_int_cst (osteptype, 0));
8726 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8727 cond, pos, neg);
8728 append_to_statement_list_force (tem, p);
8729 p = &BIND_EXPR_BODY (bind);
8731 last_body = p;
8733 last_iter = TREE_PURPOSE (t);
8734 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8736 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8737 0), last_body);
8738 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8740 if (error_operand_p (TREE_VALUE (t)))
8741 return 2;
8742 if (TREE_VALUE (t) != null_pointer_node)
8743 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8744 if (i == 4)
8746 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8747 NULL_TREE, NULL_TREE);
8748 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8749 NULL_TREE, NULL_TREE);
8750 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8751 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8752 void_type_node, r, r2);
8753 append_to_statement_list_force (tem, last_body);
8754 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8755 void_type_node, cnts[i],
8756 size_binop (PLUS_EXPR, cnts[i],
8757 size_int (1)));
8758 append_to_statement_list_force (tem, last_body);
8759 i = 5;
8761 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8762 NULL_TREE, NULL_TREE);
8763 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8764 void_type_node, r, TREE_VALUE (t));
8765 append_to_statement_list_force (tem, last_body);
8766 if (i == 5)
8768 r = build4 (ARRAY_REF, ptr_type_node, array,
8769 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8770 NULL_TREE, NULL_TREE);
8771 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8772 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8773 void_type_node, r, tem);
8774 append_to_statement_list_force (tem, last_body);
8776 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8777 void_type_node, cnts[i],
8778 size_binop (PLUS_EXPR, cnts[i],
8779 size_int (1 + (i == 5))));
8780 append_to_statement_list_force (tem, last_body);
8781 TREE_VALUE (t) = null_pointer_node;
8783 else
8785 if (last_bind)
8787 gimplify_and_add (last_bind, pre_p);
8788 last_bind = NULL_TREE;
8790 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8792 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8793 NULL, is_gimple_val, fb_rvalue);
8794 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8796 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8797 return 2;
8798 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8799 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8800 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8801 is_gimple_val, fb_rvalue) == GS_ERROR)
8802 return 2;
8803 if (i == 4)
8805 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8806 NULL_TREE, NULL_TREE);
8807 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8808 NULL_TREE, NULL_TREE);
8809 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8810 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8811 gimplify_and_add (tem, pre_p);
8812 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8813 cnts[i],
8814 size_int (1)));
8815 gimple_seq_add_stmt (pre_p, g);
8816 i = 5;
8818 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8819 NULL_TREE, NULL_TREE);
8820 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8821 gimplify_and_add (tem, pre_p);
8822 if (i == 5)
8824 r = build4 (ARRAY_REF, ptr_type_node, array,
8825 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8826 NULL_TREE, NULL_TREE);
8827 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8828 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8829 append_to_statement_list_force (tem, last_body);
8830 gimplify_and_add (tem, pre_p);
8832 g = gimple_build_assign (cnts[i],
8833 size_binop (PLUS_EXPR, cnts[i],
8834 size_int (1 + (i == 5))));
8835 gimple_seq_add_stmt (pre_p, g);
8838 if (last_bind)
8839 gimplify_and_add (last_bind, pre_p);
8840 tree cond = boolean_false_node;
8841 if (is_old)
8843 if (!unused[0])
8844 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8845 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8846 size_int (2)));
8847 if (!unused[2])
8848 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8849 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8850 cnts[2],
8851 size_binop_loc (first_loc, PLUS_EXPR,
8852 totalpx,
8853 size_int (1))));
8855 else
8857 tree prev = size_int (5);
8858 for (i = 0; i < 5; i++)
8860 if (unused[i])
8861 continue;
8862 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8863 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8864 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8865 cnts[i], unshare_expr (prev)));
8868 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8869 build_call_expr_loc (first_loc,
8870 builtin_decl_explicit (BUILT_IN_TRAP),
8871 0), void_node);
8872 gimplify_and_add (tem, pre_p);
8873 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8874 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8875 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8876 OMP_CLAUSE_CHAIN (c) = *list_p;
8877 *list_p = c;
8878 return 1;
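
A user-level depend clause with an iterator modifier, the form whose lowering is shown above (illustrative only); each iteration contributes one address to the generated depend array:

    void
    example_depend_iterator (int *a, int n)
    {
    #pragma omp task depend (iterator (i = 0 : n), in : a[i])
      a[0] = 1;
    }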
8881 /* For a set of mappings describing an array section pointed to by a struct
8882 (or derived type, etc.) component, create an "alloc" or "release" node to
8883 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8884 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8885 be created that is inserted into the list of mapping nodes attached to the
8886 directive being processed -- not part of the sorted list of nodes after
8887 GOMP_MAP_STRUCT.
8889 CODE is the code of the directive being processed. GRP_START and GRP_END
8890 are the first and last of two or three nodes representing this array section
8891 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8892 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8893 filled with the additional node described above, if needed.
8895 This function does not add the new nodes to any lists itself. It is the
8896 responsibility of the caller to do that. */
8898 static tree
8899 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8900 tree *extra_node)
8902 enum gomp_map_kind mkind
8903 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8904 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8906 gcc_assert (grp_start != grp_end);
8908 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8909 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8910 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8911 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8912 tree grp_mid = NULL_TREE;
8913 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8914 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8916 if (grp_mid
8917 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8918 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8919 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8920 else
8921 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8923 if (grp_mid
8924 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8925 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
8926 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
8928 tree c3
8929 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8930 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8931 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
8932 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8933 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
8935 *extra_node = c3;
8937 else
8938 *extra_node = NULL_TREE;
8940 return c2;
8943 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
8944 and set *BITPOSP and *POFFSETP to the bit and byte offset of the
8945 access. */
8950 static tree
8951 extract_base_bit_offset (tree base, poly_int64 *bitposp,
8952 poly_offset_int *poffsetp)
8954 tree offset;
8955 poly_int64 bitsize, bitpos;
8956 machine_mode mode;
8957 int unsignedp, reversep, volatilep = 0;
8958 poly_offset_int poffset;
8960 STRIP_NOPS (base);
8962 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8963 &unsignedp, &reversep, &volatilep);
8965 STRIP_NOPS (base);
8967 if (offset && poly_int_tree_p (offset))
8969 poffset = wi::to_poly_offset (offset);
8970 offset = NULL_TREE;
8972 else
8973 poffset = 0;
8975 if (maybe_ne (bitpos, 0))
8976 poffset += bits_to_bytes_round_down (bitpos);
8978 *bitposp = bitpos;
8979 *poffsetp = poffset;
8981 return base;
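
A worked example with a hypothetical layout may help:

    /* For BASE == s.f, where field 'f' sits at bit 32 of 's',
       get_inner_reference yields base 's' with bitpos == 32, so the
       function returns 's' with *BITPOSP == 32 and *POFFSETP == 4 (the
       bit position rounded down to bytes and folded into the byte
       offset).  */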
8984 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8985 started processing the group yet. The TEMPORARY mark is used when we first
8986 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8987 when we have processed all the group's children (i.e. all the base pointers
8988 referred to by the group's mapping nodes, recursively). */
8990 enum omp_tsort_mark {
8991 UNVISITED,
8992 TEMPORARY,
8993 PERMANENT
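
The marks support the classic three-color depth-first topological sort. A stand-alone sketch under that assumption (hypothetical 'dfs_node' type, not GCC code; the real traversal operates on the omp_mapping_group structure below):

    struct dfs_node { omp_tsort_mark mark; dfs_node *pred; };

    static bool
    example_tsort_visit (dfs_node *n)
    {
      if (n->mark == PERMANENT)
        return true;            /* Already placed in the output order.  */
      if (n->mark == TEMPORARY)
        return false;           /* Back edge: the graph has a cycle.  */
      n->mark = TEMPORARY;
      if (n->pred && !example_tsort_visit (n->pred))
        return false;
      n->mark = PERMANENT;      /* All predecessors done; emit N now.  */
      return true;
    }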
8996 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
8997 but ignores side effects in the equality comparisons. */
8999 struct tree_operand_hash_no_se : tree_operand_hash
9001 static inline bool equal (const value_type &,
9002 const compare_type &);
9005 inline bool
9006 tree_operand_hash_no_se::equal (const value_type &t1,
9007 const compare_type &t2)
9009 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9012 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9013 clause. */
9015 struct omp_mapping_group {
9016 tree *grp_start;
9017 tree grp_end;
9018 omp_tsort_mark mark;
9019 /* If we've removed the group but need to reindex, mark the group as
9020 deleted. */
9021 bool deleted;
9022 struct omp_mapping_group *sibling;
9023 struct omp_mapping_group *next;
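
A sketch of how the hash traits and the group record presumably combine (hypothetical helper; the concrete indexing code appears later in this file): base addresses that compare operand_equal_p, even in the presence of side effects, share one map entry.

    static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
    example_new_group_index (void)
    {
      return new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
    }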
9026 DEBUG_FUNCTION void
9027 debug_mapping_group (omp_mapping_group *grp)
9029 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9030 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9031 debug_generic_expr (*grp->grp_start);
9032 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9035 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9036 isn't one. */
9038 static tree
9039 omp_get_base_pointer (tree expr)
9041 while (TREE_CODE (expr) == ARRAY_REF
9042 || TREE_CODE (expr) == COMPONENT_REF)
9043 expr = TREE_OPERAND (expr, 0);
9045 if (INDIRECT_REF_P (expr)
9046 || (TREE_CODE (expr) == MEM_REF
9047 && integer_zerop (TREE_OPERAND (expr, 1))))
9049 expr = TREE_OPERAND (expr, 0);
9050 while (TREE_CODE (expr) == COMPOUND_EXPR)
9051 expr = TREE_OPERAND (expr, 1);
9052 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9053 expr = TREE_OPERAND (expr, 0);
9054 if (TREE_CODE (expr) == SAVE_EXPR)
9055 expr = TREE_OPERAND (expr, 0);
9056 STRIP_NOPS (expr);
9057 return expr;
9060 return NULL_TREE;
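
Two worked examples with hypothetical expressions:

    /* omp_get_base_pointer (p->arr[3]): strips the ARRAY_REF and
       COMPONENT_REF, finds the indirection through 'p', and returns 'p'.
       omp_get_base_pointer (s.arr[3]): stripping reaches the plain object
       's' with no indirection left, so NULL_TREE is returned.  */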
9063 /* Remove COMPONENT_REFS and indirections from EXPR. */
9065 static tree
9066 omp_strip_components_and_deref (tree expr)
9068 while (TREE_CODE (expr) == COMPONENT_REF
9069 || INDIRECT_REF_P (expr)
9070 || (TREE_CODE (expr) == MEM_REF
9071 && integer_zerop (TREE_OPERAND (expr, 1)))
9072 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9073 || TREE_CODE (expr) == COMPOUND_EXPR)
9074 if (TREE_CODE (expr) == COMPOUND_EXPR)
9075 expr = TREE_OPERAND (expr, 1);
9076 else
9077 expr = TREE_OPERAND (expr, 0);
9079 STRIP_NOPS (expr);
9081 return expr;
9084 static tree
9085 omp_strip_indirections (tree expr)
9087 while (INDIRECT_REF_P (expr)
9088 || (TREE_CODE (expr) == MEM_REF
9089 && integer_zerop (TREE_OPERAND (expr, 1))))
9090 expr = TREE_OPERAND (expr, 0);
9092 return expr;
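
Contrasting the two helpers on a hypothetical expression:

    /* omp_strip_components_and_deref ((*p).x) peels both the COMPONENT_REF
       and the indirection, returning 'p'.
       omp_strip_indirections ((*p).x) leaves the COMPONENT_REF intact and
       only removes leading '*' or zero-offset MEM_REF wrappers, so the
       expression is returned unchanged.  */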
9095 /* An attach or detach operation depends directly on the address being
9096 attached/detached. Return that address, or none if there are no
9097 attachments/detachments. */
9099 static tree
9100 omp_get_attachment (omp_mapping_group *grp)
9102 tree node = *grp->grp_start;
9104 switch (OMP_CLAUSE_MAP_KIND (node))
9106 case GOMP_MAP_TO:
9107 case GOMP_MAP_FROM:
9108 case GOMP_MAP_TOFROM:
9109 case GOMP_MAP_ALWAYS_FROM:
9110 case GOMP_MAP_ALWAYS_TO:
9111 case GOMP_MAP_ALWAYS_TOFROM:
9112 case GOMP_MAP_FORCE_FROM:
9113 case GOMP_MAP_FORCE_TO:
9114 case GOMP_MAP_FORCE_TOFROM:
9115 case GOMP_MAP_FORCE_PRESENT:
9116 case GOMP_MAP_PRESENT_ALLOC:
9117 case GOMP_MAP_PRESENT_FROM:
9118 case GOMP_MAP_PRESENT_TO:
9119 case GOMP_MAP_PRESENT_TOFROM:
9120 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9121 case GOMP_MAP_ALWAYS_PRESENT_TO:
9122 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9123 case GOMP_MAP_ALLOC:
9124 case GOMP_MAP_RELEASE:
9125 case GOMP_MAP_DELETE:
9126 case GOMP_MAP_FORCE_ALLOC:
9127 if (node == grp->grp_end)
9128 return NULL_TREE;
9130 node = OMP_CLAUSE_CHAIN (node);
9131 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9133 gcc_assert (node != grp->grp_end);
9134 node = OMP_CLAUSE_CHAIN (node);
9136 if (node)
9137 switch (OMP_CLAUSE_MAP_KIND (node))
9139 case GOMP_MAP_POINTER:
9140 case GOMP_MAP_ALWAYS_POINTER:
9141 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9142 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9143 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9144 return NULL_TREE;
9146 case GOMP_MAP_ATTACH_DETACH:
9147 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9148 return OMP_CLAUSE_DECL (node);
9150 default:
9151 internal_error ("unexpected mapping node");
9153 return error_mark_node;
9155 case GOMP_MAP_TO_PSET:
9156 gcc_assert (node != grp->grp_end);
9157 node = OMP_CLAUSE_CHAIN (node);
9158 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9159 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9160 return OMP_CLAUSE_DECL (node);
9161 else
9162 internal_error ("unexpected mapping node");
9163 return error_mark_node;
9165 case GOMP_MAP_ATTACH:
9166 case GOMP_MAP_DETACH:
9167 node = OMP_CLAUSE_CHAIN (node);
9168 if (!node || *grp->grp_start == grp->grp_end)
9169 return OMP_CLAUSE_DECL (*grp->grp_start);
9170 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9171 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9172 return OMP_CLAUSE_DECL (*grp->grp_start);
9173 else
9174 internal_error ("unexpected mapping node");
9175 return error_mark_node;
9177 case GOMP_MAP_STRUCT:
9178 case GOMP_MAP_FORCE_DEVICEPTR:
9179 case GOMP_MAP_DEVICE_RESIDENT:
9180 case GOMP_MAP_LINK:
9181 case GOMP_MAP_IF_PRESENT:
9182 case GOMP_MAP_FIRSTPRIVATE:
9183 case GOMP_MAP_FIRSTPRIVATE_INT:
9184 case GOMP_MAP_USE_DEVICE_PTR:
9185 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9186 return NULL_TREE;
9188 default:
9189 internal_error ("unexpected mapping node");
9192 return error_mark_node;
9195 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9196 mappings, return the chain pointer to the end of that group in the list. */
9198 static tree *
9199 omp_group_last (tree *start_p)
9201 tree c = *start_p, nc, *grp_last_p = start_p;
9203 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9205 nc = OMP_CLAUSE_CHAIN (c);
9207 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9208 return grp_last_p;
9210 switch (OMP_CLAUSE_MAP_KIND (c))
9212 default:
9213 while (nc
9214 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9215 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9216 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9217 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9218 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9219 || (OMP_CLAUSE_MAP_KIND (nc)
9220 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9221 || (OMP_CLAUSE_MAP_KIND (nc)
9222 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9223 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9224 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9226 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9227 c = nc;
9228 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9229 if (nc2
9230 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9231 && (OMP_CLAUSE_MAP_KIND (nc)
9232 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9233 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9235 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9236 c = nc2;
9237 nc2 = OMP_CLAUSE_CHAIN (nc2);
9239 nc = nc2;
9241 break;
9243 case GOMP_MAP_ATTACH:
9244 case GOMP_MAP_DETACH:
9245 /* This is a weird artifact of how directives are parsed: bare attach or
9246 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9247 FIRSTPRIVATE_REFERENCE node. FIXME. */
9248 if (nc
9249 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9250 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9251 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9252 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9253 break;
9255 case GOMP_MAP_TO_PSET:
9256 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9257 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9258 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9259 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9260 break;
9262 case GOMP_MAP_STRUCT:
9264 unsigned HOST_WIDE_INT num_mappings
9265 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9266 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9267 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9268 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9269 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9270 for (unsigned i = 0; i < num_mappings; i++)
9271 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9273 break;
9276 return grp_last_p;
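/* Example (illustrative, not from the original file): for a pointer P, a
   clause such as "map(to: p[0:n])" is typically emitted by the front ends
   as a pair of nodes,

     GOMP_MAP_TO (*p)  [the array section]
     GOMP_MAP_ATTACH_DETACH (p)

   and omp_group_last, given the address of the chain slot holding the first
   node, returns the address of the chain slot holding the ATTACH_DETACH
   node, so the pair is treated as a single omp_mapping_group.  */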
9279 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9280 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9281 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9282 if we have at least one such group, else return NULL. */
9284 static void
9285 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9286 tree gather_sentinel)
9288 for (tree *cp = list_p;
9289 *cp && *cp != gather_sentinel;
9290 cp = &OMP_CLAUSE_CHAIN (*cp))
9292 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9293 continue;
9295 tree *grp_last_p = omp_group_last (cp);
9296 omp_mapping_group grp;
9298 grp.grp_start = cp;
9299 grp.grp_end = *grp_last_p;
9300 grp.mark = UNVISITED;
9301 grp.sibling = NULL;
9302 grp.deleted = false;
9303 grp.next = NULL;
9304 groups->safe_push (grp);
9306 cp = grp_last_p;
9310 static vec<omp_mapping_group> *
9311 omp_gather_mapping_groups (tree *list_p)
9313 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9315 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9317 if (groups->length () > 0)
9318 return groups;
9319 else
9321 delete groups;
9322 return NULL;
9326 /* A pointer mapping group GRP may define a block of memory starting at some
9327 base address, and maybe also define a firstprivate pointer or firstprivate
9328 reference that points to that block. The return value is a node containing
9329 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9330 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9331 return the number of consecutive chained nodes in CHAINED. */
9333 static tree
9334 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9335 tree *firstprivate)
9337 tree node = *grp->grp_start;
9339 *firstprivate = NULL_TREE;
9340 *chained = 1;
9342 switch (OMP_CLAUSE_MAP_KIND (node))
9344 case GOMP_MAP_TO:
9345 case GOMP_MAP_FROM:
9346 case GOMP_MAP_TOFROM:
9347 case GOMP_MAP_ALWAYS_FROM:
9348 case GOMP_MAP_ALWAYS_TO:
9349 case GOMP_MAP_ALWAYS_TOFROM:
9350 case GOMP_MAP_FORCE_FROM:
9351 case GOMP_MAP_FORCE_TO:
9352 case GOMP_MAP_FORCE_TOFROM:
9353 case GOMP_MAP_FORCE_PRESENT:
9354 case GOMP_MAP_PRESENT_ALLOC:
9355 case GOMP_MAP_PRESENT_FROM:
9356 case GOMP_MAP_PRESENT_TO:
9357 case GOMP_MAP_PRESENT_TOFROM:
9358 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9359 case GOMP_MAP_ALWAYS_PRESENT_TO:
9360 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9361 case GOMP_MAP_ALLOC:
9362 case GOMP_MAP_RELEASE:
9363 case GOMP_MAP_DELETE:
9364 case GOMP_MAP_FORCE_ALLOC:
9365 case GOMP_MAP_IF_PRESENT:
9366 if (node == grp->grp_end)
9367 return node;
9369 node = OMP_CLAUSE_CHAIN (node);
9370 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9372 if (node == grp->grp_end)
9373 return *grp->grp_start;
9374 node = OMP_CLAUSE_CHAIN (node);
9376 if (node)
9377 switch (OMP_CLAUSE_MAP_KIND (node))
9379 case GOMP_MAP_POINTER:
9380 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9381 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9382 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9383 *firstprivate = OMP_CLAUSE_DECL (node);
9384 return *grp->grp_start;
9386 case GOMP_MAP_ALWAYS_POINTER:
9387 case GOMP_MAP_ATTACH_DETACH:
9388 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9389 return *grp->grp_start;
9391 default:
9392 internal_error ("unexpected mapping node");
9394 else
9395 internal_error ("unexpected mapping node");
9396 return error_mark_node;
9398 case GOMP_MAP_TO_PSET:
9399 gcc_assert (node != grp->grp_end);
9400 node = OMP_CLAUSE_CHAIN (node);
9401 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9402 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9403 return NULL_TREE;
9404 else
9405 internal_error ("unexpected mapping node");
9406 return error_mark_node;
9408 case GOMP_MAP_ATTACH:
9409 case GOMP_MAP_DETACH:
9410 node = OMP_CLAUSE_CHAIN (node);
9411 if (!node || *grp->grp_start == grp->grp_end)
9412 return NULL_TREE;
9413 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9414 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9416 /* We're mapping the base pointer itself in a bare attach or detach
9417 node. This is a side effect of how parsing works, and the mapping
9418 will be removed anyway (at least for enter/exit data directives).
9419 We should ignore the mapping here. FIXME. */
9420 return NULL_TREE;
9422 else
9423 internal_error ("unexpected mapping node");
9424 return error_mark_node;
9426 case GOMP_MAP_STRUCT:
9428 unsigned HOST_WIDE_INT num_mappings
9429 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9430 node = OMP_CLAUSE_CHAIN (node);
9431 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9432 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9434 *firstprivate = OMP_CLAUSE_DECL (node);
9435 node = OMP_CLAUSE_CHAIN (node);
9437 *chained = num_mappings;
9438 return node;
9441 case GOMP_MAP_FORCE_DEVICEPTR:
9442 case GOMP_MAP_DEVICE_RESIDENT:
9443 case GOMP_MAP_LINK:
9444 case GOMP_MAP_FIRSTPRIVATE:
9445 case GOMP_MAP_FIRSTPRIVATE_INT:
9446 case GOMP_MAP_USE_DEVICE_PTR:
9447 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9448 return NULL_TREE;
9450 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9451 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9452 case GOMP_MAP_POINTER:
9453 case GOMP_MAP_ALWAYS_POINTER:
9454 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9455 /* These shouldn't appear by themselves. */
9456 if (!seen_error ())
9457 internal_error ("unexpected pointer mapping node");
9458 return error_mark_node;
9460 default:
9461 gcc_unreachable ();
9464 return error_mark_node;
9467 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9468 nodes by tree_operand_hash_no_se. */
9470 static void
9471 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9472 omp_mapping_group *> *grpmap,
9473 vec<omp_mapping_group> *groups,
9474 tree reindex_sentinel)
9476 omp_mapping_group *grp;
9477 unsigned int i;
9478 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9480 FOR_EACH_VEC_ELT (*groups, i, grp)
9482 if (reindexing && *grp->grp_start == reindex_sentinel)
9483 above_hwm = true;
9485 if (reindexing && !above_hwm)
9486 continue;
9488 tree fpp;
9489 unsigned int chained;
9490 tree node = omp_group_base (grp, &chained, &fpp);
9492 if (node == error_mark_node || (!node && !fpp))
9493 continue;
9495 for (unsigned j = 0;
9496 node && j < chained;
9497 node = OMP_CLAUSE_CHAIN (node), j++)
9499 tree decl = OMP_CLAUSE_DECL (node);
9500 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9501 meaning node-hash lookups don't work. This is a workaround for
9502 that, but ideally we should just create the INDIRECT_REF at
9503 source instead. FIXME. */
9504 if (TREE_CODE (decl) == MEM_REF
9505 && integer_zerop (TREE_OPERAND (decl, 1)))
9506 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9508 omp_mapping_group **prev = grpmap->get (decl);
9510 if (prev && *prev == grp)
9511 /* Empty. */;
9512 else if (prev)
9514 /* Mapping the same thing twice is normally diagnosed as an error,
9515 but can happen under some circumstances, e.g. in pr99928-16.c,
9516 the directive:
9518 #pragma omp target simd reduction(+:a[:3]) \
9519 map(always, tofrom: a[:6])
9522 will result in two "a[0]" mappings (of different sizes). */
9524 grp->sibling = (*prev)->sibling;
9525 (*prev)->sibling = grp;
9527 else
9528 grpmap->put (decl, grp);
9531 if (!fpp)
9532 continue;
9534 omp_mapping_group **prev = grpmap->get (fpp);
9535 if (prev && *prev != grp)
9537 grp->sibling = (*prev)->sibling;
9538 (*prev)->sibling = grp;
9540 else
9541 grpmap->put (fpp, grp);
9545 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9546 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9548 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9549 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9551 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9553 return grpmap;
9556 /* Rebuild group map from partially-processed clause list (during
9557 omp_build_struct_sibling_lists). We have already processed nodes up until
9558 a high-water mark (HWM). This is a bit tricky because the list is being
9559 reordered as it is scanned, but we know:
9561 1. The list after HWM has not been touched yet, so we can reindex it safely.
9563 2. The list before and including HWM has been altered, but remains
9564 well-formed throughout the sibling-list building operation.
9566 So, we can do the reindex operation in two parts, on the processed and
9567 then the unprocessed halves of the list. */
9569 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9570 omp_reindex_mapping_groups (tree *list_p,
9571 vec<omp_mapping_group> *groups,
9572 vec<omp_mapping_group> *processed_groups,
9573 tree sentinel)
9575 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9576 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9578 processed_groups->truncate (0);
9580 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9581 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9582 if (sentinel)
9583 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9585 return grpmap;
9588 /* Find the immediately-containing struct for a component ref (etc.)
9589 expression EXPR. */
9591 static tree
9592 omp_containing_struct (tree expr)
9594 tree expr0 = expr;
9596 STRIP_NOPS (expr);
9598 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9599 component ref. */
9600 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9601 return expr0;
9603 while (TREE_CODE (expr) == ARRAY_REF)
9604 expr = TREE_OPERAND (expr, 0);
9606 if (TREE_CODE (expr) == COMPONENT_REF)
9607 expr = TREE_OPERAND (expr, 0);
9609 return expr;
9612 /* Return TRUE if DECL describes a component that is part of a whole structure
9613 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9614 that maps that structure, if present. */
9616 static bool
9617 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9618 omp_mapping_group *> *grpmap,
9619 tree decl,
9620 omp_mapping_group **mapped_by_group)
9622 tree wsdecl = NULL_TREE;
9624 *mapped_by_group = NULL;
9626 while (true)
9628 wsdecl = omp_containing_struct (decl);
9629 if (wsdecl == decl)
9630 break;
9631 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9632 if (!wholestruct
9633 && TREE_CODE (wsdecl) == MEM_REF
9634 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9636 tree deref = TREE_OPERAND (wsdecl, 0);
9637 deref = build_fold_indirect_ref (deref);
9638 wholestruct = grpmap->get (deref);
9640 if (wholestruct)
9642 *mapped_by_group = *wholestruct;
9643 return true;
9645 decl = wsdecl;
9648 return false;
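/* For instance (illustrative, not from the original file): given DECL
   "(*p).a.b", the loop above first looks up a group mapping the containing
   struct "(*p).a" and then the whole object "(*p)", retrying a zero-offset
   MEM_REF base as an INDIRECT_REF where necessary, and stops at the first
   enclosing structure that has a mapping group of its own.  */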
9651 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9652 FALSE on error. */
9654 static bool
9655 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9656 vec<omp_mapping_group> *groups,
9657 hash_map<tree_operand_hash_no_se,
9658 omp_mapping_group *> *grpmap,
9659 omp_mapping_group *grp)
9661 if (grp->mark == PERMANENT)
9662 return true;
9663 if (grp->mark == TEMPORARY)
9665 fprintf (stderr, "when processing group:\n");
9666 debug_mapping_group (grp);
9667 internal_error ("base pointer cycle detected");
9668 return false;
9670 grp->mark = TEMPORARY;
9672 tree attaches_to = omp_get_attachment (grp);
9674 if (attaches_to)
9676 omp_mapping_group **basep = grpmap->get (attaches_to);
9678 if (basep && *basep != grp)
9680 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9681 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9682 return false;
9686 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9688 while (decl)
9690 tree base = omp_get_base_pointer (decl);
9692 if (!base)
9693 break;
9695 omp_mapping_group **innerp = grpmap->get (base);
9696 omp_mapping_group *wholestruct;
9698 /* We should treat whole-structure mappings as if all (pointer, in this
9699 case) members are mapped as individual list items. Check if we have
9700 such a whole-structure mapping, if we don't have an explicit reference
9701 to the pointer member itself. */
9702 if (!innerp
9703 && TREE_CODE (base) == COMPONENT_REF
9704 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9705 innerp = &wholestruct;
9707 if (innerp && *innerp != grp)
9709 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9710 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9711 return false;
9712 break;
9715 decl = base;
9718 grp->mark = PERMANENT;
9720 /* Emit grp to output list. */
9722 **outlist = grp;
9723 *outlist = &grp->next;
9725 return true;
9728 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9729 before mappings that use those pointers. This is an implementation of the
9730 depth-first search algorithm, described e.g. at:
9732 https://en.wikipedia.org/wiki/Topological_sorting
9735 static omp_mapping_group *
9736 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9737 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
9738 *grpmap)
9740 omp_mapping_group *grp, *outlist = NULL, **cursor;
9741 unsigned int i;
9743 cursor = &outlist;
9745 FOR_EACH_VEC_ELT (*groups, i, grp)
9747 if (grp->mark != PERMANENT)
9748 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9749 return NULL;
9752 return outlist;
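/* Illustrative example (not from the original file): given

     #pragma omp target map(to: p[0:n]) map(tofrom: p)

   the group for "p[0:n]" carries an attachment to the pointer "p", which is
   itself mapped by the second group, so the DFS above emits the "p" group
   first and the reordered clause list maps the pointer before the section
   based on it.  Meeting a TEMPORARY mark during the walk means the
   base-pointer relation is cyclic, which is reported as an internal
   error.  */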
9755 /* Split INLIST into two parts, moving groups corresponding to
9756 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9757 The former list is then appended to the latter. Each sub-list retains the
9758 order of the original list.
9759 Note that ATTACH nodes are later moved to the end of the list in
9760 gimplify_adjust_omp_clauses, for target regions. */
9762 static omp_mapping_group *
9763 omp_segregate_mapping_groups (omp_mapping_group *inlist)
9765 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
9766 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
9768 for (omp_mapping_group *w = inlist; w;)
9770 tree c = *w->grp_start;
9771 omp_mapping_group *next = w->next;
9773 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9775 switch (OMP_CLAUSE_MAP_KIND (c))
9777 case GOMP_MAP_ALLOC:
9778 case GOMP_MAP_RELEASE:
9779 case GOMP_MAP_DELETE:
9780 *ard_tail = w;
9781 w->next = NULL;
9782 ard_tail = &w->next;
9783 break;
9785 default:
9786 *tf_tail = w;
9787 w->next = NULL;
9788 tf_tail = &w->next;
9791 w = next;
9794 /* Now splice the lists together... */
9795 *tf_tail = ard_groups;
9797 return tf_groups;
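/* For example (illustrative, not from the original file), groups whose
   leading map kinds are

     to, release, tofrom, alloc

   are segregated into "to, tofrom" and "release, alloc" and rejoined as

     to, tofrom, release, alloc

   i.e. the allocation/deallocation-only mappings end up after the mappings
   that transfer data, with each sub-list keeping its original order.  */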
9800 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9801 those groups based on the output list of omp_tsort_mapping_groups --
9802 singly-linked, threaded through each element's NEXT pointer starting at
9803 HEAD. Each list element appears exactly once in that linked list.
9805 Each element of GROUPS may correspond to one or several mapping nodes.
9806 Node groups are kept together, and the reordered list reuses the list
9807 positions occupied by the original groups.
9808 Hence if we have e.g.
9810 {to ptr ptr} firstprivate {tofrom ptr} ...
9811 ^ ^ ^
9812 first group non-"map" second group
9814 and say the second group contains a base pointer for the first so must be
9815 moved before it, the resulting list will contain:
9817 {tofrom ptr} firstprivate {to ptr ptr} ...
9818 ^ prev. second group ^ prev. first group
9821 static tree *
9822 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
9823 omp_mapping_group *head,
9824 tree *list_p)
9826 omp_mapping_group *grp;
9827 unsigned int i;
9828 unsigned numgroups = groups->length ();
9829 auto_vec<tree> old_heads (numgroups);
9830 auto_vec<tree *> old_headps (numgroups);
9831 auto_vec<tree> new_heads (numgroups);
9832 auto_vec<tree> old_succs (numgroups);
9833 bool map_at_start = (list_p == (*groups)[0].grp_start);
9835 tree *new_grp_tail = NULL;
9837 /* Stash the start & end nodes of each mapping group before we start
9838 modifying the list. */
9839 FOR_EACH_VEC_ELT (*groups, i, grp)
9841 old_headps.quick_push (grp->grp_start);
9842 old_heads.quick_push (*grp->grp_start);
9843 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
9846 /* And similarly, the heads of the groups in the order we want to rearrange
9847 the list to. */
9848 for (omp_mapping_group *w = head; w; w = w->next)
9849 new_heads.quick_push (*w->grp_start);
9851 FOR_EACH_VEC_ELT (*groups, i, grp)
9853 gcc_assert (head);
9855 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
9857 /* a {b c d} {e f g} h i j (original)
9859 a {k l m} {e f g} h i j (inserted new group on last iter)
9861 a {k l m} {n o p} h i j (this time, chain last group to new one)
9862 ^new_grp_tail
9864 *new_grp_tail = new_heads[i];
9866 else if (new_grp_tail)
9868 /* a {b c d} e {f g h} i j k (original)
9870 a {l m n} e {f g h} i j k (gap after last iter's group)
9872 a {l m n} e {o p q} h i j (chain last group to old successor)
9873 ^new_grp_tail
9875 *new_grp_tail = old_succs[i - 1];
9876 *old_headps[i] = new_heads[i];
9878 else
9880 /* The first inserted group -- point to new group, and leave end
9881 open.
9882 a {b c d} e f
9884 a {g h i...
9886 *grp->grp_start = new_heads[i];
9889 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
9891 head = head->next;
9894 if (new_grp_tail)
9895 *new_grp_tail = old_succs[numgroups - 1];
9897 gcc_assert (!head);
9899 return map_at_start ? (*groups)[0].grp_start : list_p;
9902 /* DECL is supposed to have lastprivate semantics in the outer contexts
9903 of combined/composite constructs, starting with OCTX.
9904 Add the needed lastprivate, shared or map clause if no data-sharing or
9905 mapping clause is present. IMPLICIT_P is true if it is an implicit
9906 clause (IV on simd), in which case the lastprivate will not be
9907 copied to some constructs. */
9909 static void
9910 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9911 tree decl, bool implicit_p)
9913 struct gimplify_omp_ctx *orig_octx = octx;
9914 for (; octx; octx = octx->outer_context)
9916 if ((octx->region_type == ORT_COMBINED_PARALLEL
9917 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9918 && splay_tree_lookup (octx->variables,
9919 (splay_tree_key) decl) == NULL)
9921 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9922 continue;
9924 if ((octx->region_type & ORT_TASK) != 0
9925 && octx->combined_loop
9926 && splay_tree_lookup (octx->variables,
9927 (splay_tree_key) decl) == NULL)
9929 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9930 continue;
9932 if (implicit_p
9933 && octx->region_type == ORT_WORKSHARE
9934 && octx->combined_loop
9935 && splay_tree_lookup (octx->variables,
9936 (splay_tree_key) decl) == NULL
9937 && octx->outer_context
9938 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9939 && splay_tree_lookup (octx->outer_context->variables,
9940 (splay_tree_key) decl) == NULL)
9942 octx = octx->outer_context;
9943 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9944 continue;
9946 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9947 && octx->combined_loop
9948 && splay_tree_lookup (octx->variables,
9949 (splay_tree_key) decl) == NULL
9950 && !omp_check_private (octx, decl, false))
9952 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9953 continue;
9955 if (octx->region_type == ORT_COMBINED_TARGET)
9957 splay_tree_node n = splay_tree_lookup (octx->variables,
9958 (splay_tree_key) decl);
9959 if (n == NULL)
9961 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9962 octx = octx->outer_context;
9964 else if (!implicit_p
9965 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9967 n->value &= ~(GOVD_FIRSTPRIVATE
9968 | GOVD_FIRSTPRIVATE_IMPLICIT
9969 | GOVD_EXPLICIT);
9970 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9971 octx = octx->outer_context;
9974 break;
9976 if (octx && (implicit_p || octx != orig_octx))
9977 omp_notice_variable (octx, decl, true);
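/* As an illustration (not from the original file): for a combined construct
   such as

     #pragma omp target parallel for lastprivate(x)

   the explicit lastprivate applies to the innermost loop construct, while
   the walk above adds an implicit "shared" for X on the combined parallel
   and an implicit "map" on the combined target, so that the final value of
   X can be written back out of the offloaded region.  */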
9980 /* If we have mappings INNER and OUTER, where INNER is a component access and
9981 OUTER is a mapping of the whole containing struct, check that the mappings
9982 are compatible. We'll be deleting the inner mapping, so we need to make
9983 sure the outer mapping does (at least) the same transfers to/from the device
9984 as the inner mapping. */
9986 bool
9987 omp_check_mapping_compatibility (location_t loc,
9988 omp_mapping_group *outer,
9989 omp_mapping_group *inner)
9991 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
9993 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
9994 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
9996 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
9997 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
9999 if (outer_kind == inner_kind)
10000 return true;
10002 switch (outer_kind)
10004 case GOMP_MAP_ALWAYS_TO:
10005 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10006 || inner_kind == GOMP_MAP_ALLOC
10007 || inner_kind == GOMP_MAP_TO)
10008 return true;
10009 break;
10011 case GOMP_MAP_ALWAYS_FROM:
10012 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10013 || inner_kind == GOMP_MAP_ALLOC
10014 || inner_kind == GOMP_MAP_FROM)
10015 return true;
10016 break;
10018 case GOMP_MAP_TO:
10019 case GOMP_MAP_FROM:
10020 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10021 || inner_kind == GOMP_MAP_ALLOC)
10022 return true;
10023 break;
10025 case GOMP_MAP_ALWAYS_TOFROM:
10026 case GOMP_MAP_TOFROM:
10027 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10028 || inner_kind == GOMP_MAP_ALLOC
10029 || inner_kind == GOMP_MAP_TO
10030 || inner_kind == GOMP_MAP_FROM
10031 || inner_kind == GOMP_MAP_TOFROM)
10032 return true;
10033 break;
10035 default:
10039 error_at (loc, "data movement for component %qE is not compatible with "
10040 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10041 OMP_CLAUSE_DECL (first_outer));
10043 return false;
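/* E.g. (illustrative, not from the original file): an outer
   "map(tofrom: s)" subsumes an inner "map(to: s.a)", since "tofrom"
   performs at least the transfers that "to" does, so TRUE is returned;
   whereas an outer "map(from: s)" does not subsume an inner
   "map(to: s.a)", and the error above is emitted.  */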
10046 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10047 clause dependencies we handle for now are struct element mappings and
10048 whole-struct mappings on the same directive, and duplicate clause
10049 detection. */
10051 void
10052 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10053 hash_map<tree_operand_hash_no_se,
10054 omp_mapping_group *> *grpmap)
10056 int i;
10057 omp_mapping_group *grp;
10058 hash_set<tree_operand_hash> *seen_components = NULL;
10059 hash_set<tree_operand_hash> *shown_error = NULL;
10061 FOR_EACH_VEC_ELT (*groups, i, grp)
10063 tree grp_end = grp->grp_end;
10064 tree decl = OMP_CLAUSE_DECL (grp_end);
10066 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10068 if (DECL_P (grp_end))
10069 continue;
10071 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10072 while (TREE_CODE (c) == ARRAY_REF)
10073 c = TREE_OPERAND (c, 0);
10074 if (TREE_CODE (c) != COMPONENT_REF)
10075 continue;
10076 if (!seen_components)
10077 seen_components = new hash_set<tree_operand_hash> ();
10078 if (!shown_error)
10079 shown_error = new hash_set<tree_operand_hash> ();
10080 if (seen_components->contains (c)
10081 && !shown_error->contains (c))
10083 error_at (OMP_CLAUSE_LOCATION (grp_end),
10084 "%qE appears more than once in map clauses",
10085 OMP_CLAUSE_DECL (grp_end));
10086 shown_error->add (c);
10088 else
10089 seen_components->add (c);
10091 omp_mapping_group *struct_group;
10092 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10093 && *grp->grp_start == grp_end)
10095 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10096 struct_group, grp);
10097 /* Remove the whole of this mapping -- redundant. */
10098 grp->deleted = true;
10102 if (seen_components)
10103 delete seen_components;
10104 if (shown_error)
10105 delete shown_error;
10108 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10109 is set to the node previously pointed to by INSERT_AT. */
10111 static tree *
10112 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10114 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10115 *insert_at = newnode;
10116 return &OMP_CLAUSE_CHAIN (newnode);
10119 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10120 pointed to by chain MOVE_AFTER instead. */
10122 static void
10123 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10125 gcc_assert (node == *old_pos);
10126 *old_pos = OMP_CLAUSE_CHAIN (node);
10127 OMP_CLAUSE_CHAIN (node) = *move_after;
10128 *move_after = node;
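/* A small before/after sketch (illustrative): moving node B, where OLD_POS
   is A's chain pointer and MOVE_AFTER is D's chain pointer:

     before:  A -> B -> C -> D -> E
     after:   A -> C -> D -> B -> E  */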
10131 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
10132 through LAST_NODE to after the MOVE_AFTER chain. Similar to the function
10133 below, but no new nodes are prepended to the list before splicing it in.
10134 Return the position we should continue scanning the list at, or NULL to
10135 stay where we were. */
10137 static tree *
10138 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10139 tree *move_after)
10141 if (first_ptr == move_after)
10142 return NULL;
10144 tree tmp = *first_ptr;
10145 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10146 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10147 *move_after = tmp;
10149 return first_ptr;
10152 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10153 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10154 pointer MOVE_AFTER.
10156 The latter list was previously part of the OMP clause list, and the former
10157 (prepended) part is comprised of new nodes.
10159 We start with a list of nodes starting with a struct mapping node. We
10160 rearrange the list so that the new nodes starting from FIRST_NEW, whose last
10161 node's chain is LAST_NEW_TAIL, come directly after MOVE_AFTER, followed by
10162 the group of mapping nodes we are currently processing (from the chain
10163 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10164 we should continue processing from, or NULL to stay where we were.
10166 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10167 different) is worked through below. Here we are processing LAST_NODE, and
10168 FIRST_PTR points at the preceding mapping clause:
10170 #. mapping node chain
10171 ---------------------------------------------------
10172 A. struct_node [->B]
10173 B. comp_1 [->C]
10174 C. comp_2 [->D (move_after)]
10175 D. map_to_3 [->E]
10176 E. attach_3 [->F (first_ptr)]
10177 F. map_to_4 [->G (continue_at)]
10178 G. attach_4 (last_node) [->H]
10179 H. ...
10181 *last_new_tail = *first_ptr;
10183 I. new_node (first_new) [->F (last_new_tail)]
10185 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10187 #. mapping node chain
10188 ----------------------------------------------------
10189 A. struct_node [->B]
10190 B. comp_1 [->C]
10191 C. comp_2 [->D (move_after)]
10192 D. map_to_3 [->E]
10193 E. attach_3 [->H (first_ptr)]
10194 F. map_to_4 [->G (continue_at)]
10195 G. attach_4 (last_node) [->H]
10196 H. ...
10198 I. new_node (first_new) [->F (last_new_tail)]
10200 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10202 #. mapping node chain
10203 ---------------------------------------------------
10204 A. struct_node [->B]
10205 B. comp_1 [->C]
10206 C. comp_2 [->D (move_after)]
10207 D. map_to_3 [->E]
10208 E. attach_3 [->H (continue_at)]
10209 F. map_to_4 [->G]
10210 G. attach_4 (last_node) [->D]
10211 H. ...
10213 I. new_node (first_new) [->F (last_new_tail)]
10215 *move_after = first_new;
10217 #. mapping node chain
10218 ---------------------------------------------------
10219 A. struct_node [->B]
10220 B. comp_1 [->C]
10221 C. comp_2 [->I (move_after)]
10222 D. map_to_3 [->E]
10223 E. attach_3 [->H (continue_at)]
10224 F. map_to_4 [->G]
10225 G. attach_4 (last_node) [->D]
10226 H. ...
10227 I. new_node (first_new) [->F (last_new_tail)]
10229 or, in order:
10231 #. mapping node chain
10232 ---------------------------------------------------
10233 A. struct_node [->B]
10234 B. comp_1 [->C]
10235 C. comp_2 [->I (move_after)]
10236 I. new_node (first_new) [->F (last_new_tail)]
10237 F. map_to_4 [->G]
10238 G. attach_4 (last_node) [->D]
10239 D. map_to_3 [->E]
10240 E. attach_3 [->H (continue_at)]
10241 H. ...
10244 static tree *
10245 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10246 tree *first_ptr, tree last_node,
10247 tree *move_after)
10249 tree *continue_at = NULL;
10250 *last_new_tail = *first_ptr;
10251 if (first_ptr == move_after)
10252 *move_after = first_new;
10253 else
10255 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10256 continue_at = first_ptr;
10257 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10258 *move_after = first_new;
10260 return continue_at;
10263 /* Mapping struct members causes an additional set of nodes to be created,
10264 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10265 number of members being mapped, in order of ascending position (by
10266 address or bit offset).
10268 We scan through the list of mapping clauses, calling this function for each
10269 struct member mapping we find, and build up the list of mappings after the
10270 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10271 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10272 moved into place in the sorted list.
10274 struct {
10275 int *a;
10276 int *b;
10277 int c;
10278 int *d;
10281 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10282 struct.d[0:n])
10284 GOMP_MAP_STRUCT (4)
10285 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10286 GOMP_MAP_ALLOC (struct.a)
10287 GOMP_MAP_ALLOC (struct.b)
10288 GOMP_MAP_TO (struct.c)
10289 GOMP_MAP_ALLOC (struct.d)
10292 In the case where we are mapping references to pointers, or in Fortran if
10293 we are mapping an array with a descriptor, additional nodes may be created
10294 after the struct node list also.
10296 The return code is either a pointer to the next node to process (if the
10297 list has been rearranged), else NULL to continue with the next node in the
10298 original list. */
10300 static tree *
10301 omp_accumulate_sibling_list (enum omp_region_type region_type,
10302 enum tree_code code,
10303 hash_map<tree_operand_hash, tree>
10304 *&struct_map_to_clause, tree *grp_start_p,
10305 tree grp_end, tree *inner)
10307 poly_offset_int coffset;
10308 poly_int64 cbitpos;
10309 tree ocd = OMP_CLAUSE_DECL (grp_end);
10310 bool openmp = !(region_type & ORT_ACC);
10311 tree *continue_at = NULL;
10313 while (TREE_CODE (ocd) == ARRAY_REF)
10314 ocd = TREE_OPERAND (ocd, 0);
10316 if (INDIRECT_REF_P (ocd))
10317 ocd = TREE_OPERAND (ocd, 0);
10319 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10321 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10322 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10323 == GOMP_MAP_ATTACH_DETACH)
10324 || (OMP_CLAUSE_MAP_KIND (grp_end)
10325 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10326 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10327 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10329 /* FIXME: If we're not mapping the base pointer in some other clause on this
10330 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10331 early-exit. */
10332 if (openmp && attach_detach)
10333 return NULL;
10335 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10337 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10338 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10340 OMP_CLAUSE_SET_MAP_KIND (l, k);
10342 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10344 OMP_CLAUSE_SIZE (l)
10345 = (!attach ? size_int (1)
10346 : (DECL_P (OMP_CLAUSE_DECL (l))
10347 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10348 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10349 if (struct_map_to_clause == NULL)
10350 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10351 struct_map_to_clause->put (base, l);
10353 if (ptr || attach_detach)
10355 tree extra_node;
10356 tree alloc_node
10357 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10358 &extra_node);
10359 OMP_CLAUSE_CHAIN (l) = alloc_node;
10361 tree *insert_node_pos = grp_start_p;
10363 if (extra_node)
10365 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10366 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10368 else
10369 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10371 *insert_node_pos = l;
10373 else
10375 gcc_assert (*grp_start_p == grp_end);
10376 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10379 tree noind = omp_strip_indirections (base);
10381 if (!openmp
10382 && (region_type & ORT_TARGET)
10383 && TREE_CODE (noind) == COMPONENT_REF)
10385 /* The base for this component access is a struct component access
10386 itself. Insert a node to be processed on the next iteration of
10387 our caller's loop, which will subsequently be turned into a new,
10388 inner GOMP_MAP_STRUCT mapping.
10390 We need to do this, otherwise the non-DECL_P base won't be
10391 rewritten correctly in the offloaded region. */
10392 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10393 OMP_CLAUSE_MAP);
10394 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10395 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10396 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10397 *inner = c2;
10398 return NULL;
10401 tree sdecl = omp_strip_components_and_deref (base);
10403 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10405 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10406 OMP_CLAUSE_MAP);
10407 bool base_ref
10408 = (INDIRECT_REF_P (base)
10409 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10410 == REFERENCE_TYPE)
10411 || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
10412 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10413 (TREE_OPERAND (base, 0), 0)))
10414 == REFERENCE_TYPE))));
10415 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10416 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10417 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10418 OMP_CLAUSE_DECL (c2) = sdecl;
10419 tree baddr = build_fold_addr_expr (base);
10420 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10421 ptrdiff_type_node, baddr);
10422 /* This isn't going to be good enough when we add support for more
10423 complicated lvalue expressions. FIXME. */
10424 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10425 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10426 sdecl = build_simple_mem_ref (sdecl);
10427 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10428 ptrdiff_type_node, sdecl);
10429 OMP_CLAUSE_SIZE (c2)
10430 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10431 ptrdiff_type_node, baddr, decladdr);
10432 /* Insert after struct node. */
10433 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10434 OMP_CLAUSE_CHAIN (l) = c2;
10437 return NULL;
10439 else if (struct_map_to_clause)
10441 tree *osc = struct_map_to_clause->get (base);
10442 tree *sc = NULL, *scp = NULL;
10443 sc = &OMP_CLAUSE_CHAIN (*osc);
10444 /* The struct mapping might be immediately followed by a
10445 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10446 indirect access or a reference, or both. (This added node is removed
10447 in omp-low.c after it has been processed there.) */
10448 if (*sc != grp_end
10449 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10450 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10451 sc = &OMP_CLAUSE_CHAIN (*sc);
10452 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10453 if ((ptr || attach_detach) && sc == grp_start_p)
10454 break;
10455 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10456 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10457 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10458 break;
10459 else
10461 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10462 poly_offset_int offset;
10463 poly_int64 bitpos;
10465 if (TREE_CODE (sc_decl) == ARRAY_REF)
10467 while (TREE_CODE (sc_decl) == ARRAY_REF)
10468 sc_decl = TREE_OPERAND (sc_decl, 0);
10469 if (TREE_CODE (sc_decl) != COMPONENT_REF
10470 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10471 break;
10473 else if (INDIRECT_REF_P (sc_decl)
10474 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10475 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10476 == REFERENCE_TYPE))
10477 sc_decl = TREE_OPERAND (sc_decl, 0);
10479 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10480 if (!base2 || !operand_equal_p (base2, base, 0))
10481 break;
10482 if (scp)
10483 continue;
10484 if (maybe_lt (coffset, offset)
10485 || (known_eq (coffset, offset)
10486 && maybe_lt (cbitpos, bitpos)))
10488 if (ptr || attach_detach)
10489 scp = sc;
10490 else
10491 break;
10495 if (!attach)
10496 OMP_CLAUSE_SIZE (*osc)
10497 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10498 if (ptr || attach_detach)
10500 tree cl = NULL_TREE, extra_node;
10501 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10502 grp_end, &extra_node);
10503 tree *tail_chain = NULL;
10505 /* Here, we have:
10507 grp_end : the last (or only) node in this group.
10508 grp_start_p : pointer to the first node in a pointer mapping group
10509 up to and including GRP_END.
10510 sc : pointer to the chain for the end of the struct component
10511 list.
10512 scp : pointer to the chain for the sorted position at which we
10513 should insert in the middle of the struct component list
10514 (else NULL to insert at end).
10515 alloc_node : the "alloc" node for the structure (pointer-type)
10516 component. We insert at SCP (if present), else SC
10517 (the end of the struct component list).
10518 extra_node : a newly-synthesized node for an additional indirect
10519 pointer mapping or a Fortran pointer set, if needed.
10520 cl : first node to prepend before grp_start_p.
10521 tail_chain : pointer to chain of last prepended node.
10523 The general idea is we move the nodes for this struct mapping
10524 together: the alloc node goes into the sorted list directly after
10525 the struct mapping, and any extra nodes (together with the nodes
10526 mapping arrays pointed to by struct components) get moved after
10527 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10528 the end of the struct component mapping list. It's important that
10529 the alloc_node comes first in that case because it's part of the
10530 sorted component mapping list (but subsequent nodes are not!). */
10532 if (scp)
10533 omp_siblist_insert_node_after (alloc_node, scp);
10535 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10536 already inserted it) and the extra_node (if it is present). The
10537 list can be empty if we added alloc_node above and there is no
10538 extra node. */
10539 if (scp && extra_node)
10541 cl = extra_node;
10542 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10544 else if (extra_node)
10546 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10547 cl = alloc_node;
10548 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10550 else if (!scp)
10552 cl = alloc_node;
10553 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10556 continue_at
10557 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10558 grp_start_p, grp_end,
10560 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10562 else if (*sc != grp_end)
10564 gcc_assert (*grp_start_p == grp_end);
10566 /* We are moving the current node back to a previous struct node:
10567 the node that used to point to the current node will now point to
10568 the next node. */
10569 continue_at = grp_start_p;
10570 /* In the non-pointer case, the mapping clause itself is moved into
10571 the correct position in the struct component list, which in this
10572 case is just SC. */
10573 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10576 return continue_at;
10579 /* Scan through GROUPS, and create sorted structure sibling lists without
10580 gimplifying. */
10582 static bool
10583 omp_build_struct_sibling_lists (enum tree_code code,
10584 enum omp_region_type region_type,
10585 vec<omp_mapping_group> *groups,
10586 hash_map<tree_operand_hash_no_se,
10587 omp_mapping_group *> **grpmap,
10588 tree *list_p)
10590 unsigned i;
10591 omp_mapping_group *grp;
10592 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10593 bool success = true;
10594 tree *new_next = NULL;
10595 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10596 auto_vec<omp_mapping_group> pre_hwm_groups;
10598 FOR_EACH_VEC_ELT (*groups, i, grp)
10600 tree c = grp->grp_end;
10601 tree decl = OMP_CLAUSE_DECL (c);
10602 tree grp_end = grp->grp_end;
10603 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10605 if (new_next)
10606 grp->grp_start = new_next;
10608 new_next = NULL;
10610 tree *grp_start_p = grp->grp_start;
10612 if (DECL_P (decl))
10613 continue;
10615 /* Skip groups we marked for deletion in
10616 oacc_resolve_clause_dependencies. */
10617 if (grp->deleted)
10618 continue;
10620 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10621 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10623 /* Don't process an array descriptor that isn't inside a derived type
10624 as a struct (the GOMP_MAP_POINTER following will have the form
10625 "var.data", but such mappings are handled specially). */
10626 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10627 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10628 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10629 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10630 continue;
10633 tree d = decl;
10634 if (TREE_CODE (d) == ARRAY_REF)
10636 while (TREE_CODE (d) == ARRAY_REF)
10637 d = TREE_OPERAND (d, 0);
10638 if (TREE_CODE (d) == COMPONENT_REF
10639 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10640 decl = d;
10642 if (d == decl
10643 && INDIRECT_REF_P (decl)
10644 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10645 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10646 == REFERENCE_TYPE)
10647 && (OMP_CLAUSE_MAP_KIND (c)
10648 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10649 decl = TREE_OPERAND (decl, 0);
10651 STRIP_NOPS (decl);
10653 if (TREE_CODE (decl) != COMPONENT_REF)
10654 continue;
10656 /* If we're mapping the whole struct in another node, skip adding this
10657 node to a sibling list. */
10658 omp_mapping_group *wholestruct;
10659 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10660 &wholestruct))
10662 if (!(region_type & ORT_ACC)
10663 && *grp_start_p == grp_end)
10664 /* Remove the whole of this mapping -- redundant. */
10665 grp->deleted = true;
10667 continue;
10670 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10671 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10672 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10673 && code != OACC_UPDATE
10674 && code != OMP_TARGET_UPDATE)
10676 if (error_operand_p (decl))
10678 success = false;
10679 goto error_out;
10682 tree stype = TREE_TYPE (decl);
10683 if (TREE_CODE (stype) == REFERENCE_TYPE)
10684 stype = TREE_TYPE (stype);
10685 if (TYPE_SIZE_UNIT (stype) == NULL
10686 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10688 error_at (OMP_CLAUSE_LOCATION (c),
10689 "mapping field %qE of variable length "
10690 "structure", OMP_CLAUSE_DECL (c));
10691 success = false;
10692 goto error_out;
10695 tree inner = NULL_TREE;
10697 new_next
10698 = omp_accumulate_sibling_list (region_type, code,
10699 struct_map_to_clause, grp_start_p,
10700 grp_end, &inner);
10702 if (inner)
10704 if (new_next && *new_next == NULL_TREE)
10705 *new_next = inner;
10706 else
10707 *tail = inner;
10709 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10710 omp_mapping_group newgrp;
10711 newgrp.grp_start = new_next ? new_next : tail;
10712 newgrp.grp_end = inner;
10713 newgrp.mark = UNVISITED;
10714 newgrp.sibling = NULL;
10715 newgrp.deleted = false;
10716 newgrp.next = NULL;
10717 groups->safe_push (newgrp);
10719 /* !!! Growing GROUPS might invalidate the pointers in the group
10720 map. Rebuild it here. This is a bit inefficient, but
10721 shouldn't happen very often. */
10722 delete (*grpmap);
10723 *grpmap
10724 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10725 sentinel);
10727 tail = &OMP_CLAUSE_CHAIN (inner);
10732 /* Delete groups marked for deletion above. At this point the order of the
10733 groups may no longer correspond to the order of the underlying list,
10734 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10735 deleted nodes... */
10737 FOR_EACH_VEC_ELT (*groups, i, grp)
10738 if (grp->deleted)
10739 for (tree d = *grp->grp_start;
10740 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10741 d = OMP_CLAUSE_CHAIN (d))
10742 OMP_CLAUSE_DECL (d) = NULL_TREE;
10744 /* ...then sweep through the list removing the now-empty nodes. */
10746 tail = list_p;
10747 while (*tail)
10749 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10750 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
10751 *tail = OMP_CLAUSE_CHAIN (*tail);
10752 else
10753 tail = &OMP_CLAUSE_CHAIN (*tail);
10756 error_out:
10757 if (struct_map_to_clause)
10758 delete struct_map_to_clause;
10760 return success;
10763 /* Scan the OMP clauses in *LIST_P, installing mappings into new
10764 and previous omp contexts. */
10766 static void
10767 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10768 enum omp_region_type region_type,
10769 enum tree_code code)
10771 struct gimplify_omp_ctx *ctx, *outer_ctx;
10772 tree c;
10773 tree *orig_list_p = list_p;
10774 int handled_depend_iterators = -1;
10775 int nowait = -1;
10777 ctx = new_omp_context (region_type);
10778 ctx->code = code;
10779 outer_ctx = ctx->outer_context;
10780 if (code == OMP_TARGET)
10782 if (!lang_GNU_Fortran ())
10783 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10784 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10785 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10786 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10788 if (!lang_GNU_Fortran ())
10789 switch (code)
10791 case OMP_TARGET:
10792 case OMP_TARGET_DATA:
10793 case OMP_TARGET_ENTER_DATA:
10794 case OMP_TARGET_EXIT_DATA:
10795 case OACC_DECLARE:
10796 case OACC_HOST_DATA:
10797 case OACC_PARALLEL:
10798 case OACC_KERNELS:
10799 ctx->target_firstprivatize_array_bases = true;
10800 default:
10801 break;
10804 if (code == OMP_TARGET
10805 || code == OMP_TARGET_DATA
10806 || code == OMP_TARGET_ENTER_DATA
10807 || code == OMP_TARGET_EXIT_DATA)
10809 vec<omp_mapping_group> *groups;
10810 groups = omp_gather_mapping_groups (list_p);
10811 if (groups)
10813 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10814 grpmap = omp_index_mapping_groups (groups);
10816 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10817 list_p);
10819 omp_mapping_group *outlist = NULL;
10821 /* Topological sorting may fail if we have duplicate nodes, which
10822 we should have detected and shown an error for already. Skip
10823 sorting in that case. */
10824 if (seen_error ())
10825 goto failure;
10827 delete grpmap;
10828 delete groups;
10830 /* Rebuild now that we have struct sibling lists. */
10831 groups = omp_gather_mapping_groups (list_p);
10832 grpmap = omp_index_mapping_groups (groups);
10834 outlist = omp_tsort_mapping_groups (groups, grpmap);
10835 outlist = omp_segregate_mapping_groups (outlist);
10836 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10838 failure:
10839 delete grpmap;
10840 delete groups;
10843 /* OpenMP map clauses with 'present' need to go in front of those
10844 without. */
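/* E.g. (illustrative, not from the original file): the clause list

     map(present, to: a) map(to: b) map(present, from: c)

   is rewritten by the loop below as

     map(present, to: a) map(present, from: c) map(to: b)

   i.e. the 'present' maps are spliced, keeping their relative order, in
   front of the first map clause.  */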
10845 tree present_map_head = NULL;
10846 tree *present_map_tail_p = &present_map_head;
10847 tree *first_map_clause_p = NULL;
10849 for (tree *c_p = list_p; *c_p; )
10851 tree c = *c_p;
10852 tree *next_c_p = &OMP_CLAUSE_CHAIN (c);
10854 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
10856 if (!first_map_clause_p)
10857 first_map_clause_p = c_p;
10858 switch (OMP_CLAUSE_MAP_KIND (c))
10860 case GOMP_MAP_PRESENT_ALLOC:
10861 case GOMP_MAP_PRESENT_FROM:
10862 case GOMP_MAP_PRESENT_TO:
10863 case GOMP_MAP_PRESENT_TOFROM:
10864 next_c_p = c_p;
10865 *c_p = OMP_CLAUSE_CHAIN (c);
10867 OMP_CLAUSE_CHAIN (c) = NULL;
10868 *present_map_tail_p = c;
10869 present_map_tail_p = &OMP_CLAUSE_CHAIN (c);
10871 break;
10873 default:
10874 break;
10878 c_p = next_c_p;
10880 if (first_map_clause_p && present_map_head)
10882 tree next = *first_map_clause_p;
10883 *first_map_clause_p = present_map_head;
10884 *present_map_tail_p = next;
10887 else if (region_type & ORT_ACC)
10889 vec<omp_mapping_group> *groups;
10890 groups = omp_gather_mapping_groups (list_p);
10891 if (groups)
10893 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10894 grpmap = omp_index_mapping_groups (groups);
10896 oacc_resolve_clause_dependencies (groups, grpmap);
10897 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10898 list_p);
10900 delete groups;
10901 delete grpmap;
10905 while ((c = *list_p) != NULL)
10907 bool remove = false;
10908 bool notice_outer = true;
10909 const char *check_non_private = NULL;
10910 unsigned int flags;
10911 tree decl;
10913 switch (OMP_CLAUSE_CODE (c))
10915 case OMP_CLAUSE_PRIVATE:
10916 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10917 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10919 flags |= GOVD_PRIVATE_OUTER_REF;
10920 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
10922 else
10923 notice_outer = false;
10924 goto do_add;
10925 case OMP_CLAUSE_SHARED:
10926 flags = GOVD_SHARED | GOVD_EXPLICIT;
10927 goto do_add;
10928 case OMP_CLAUSE_FIRSTPRIVATE:
10929 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10930 check_non_private = "firstprivate";
10931 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10933 gcc_assert (code == OMP_TARGET);
10934 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
10936 goto do_add;
10937 case OMP_CLAUSE_LASTPRIVATE:
10938 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10939 switch (code)
10941 case OMP_DISTRIBUTE:
10942 error_at (OMP_CLAUSE_LOCATION (c),
10943 "conditional %<lastprivate%> clause on "
10944 "%qs construct", "distribute");
10945 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10946 break;
10947 case OMP_TASKLOOP:
10948 error_at (OMP_CLAUSE_LOCATION (c),
10949 "conditional %<lastprivate%> clause on "
10950 "%qs construct", "taskloop");
10951 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10952 break;
10953 default:
10954 break;
10956 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
10957 if (code != OMP_LOOP)
10958 check_non_private = "lastprivate";
10959 decl = OMP_CLAUSE_DECL (c);
10960 if (error_operand_p (decl))
10961 goto do_add;
10962 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
10963 && !lang_hooks.decls.omp_scalar_p (decl, true))
10965 error_at (OMP_CLAUSE_LOCATION (c),
10966 "non-scalar variable %qD in conditional "
10967 "%<lastprivate%> clause", decl);
10968 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10970 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10971 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
10972 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
10973 false);
10974 goto do_add;
10975 case OMP_CLAUSE_REDUCTION:
10976 if (OMP_CLAUSE_REDUCTION_TASK (c))
10978 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10980 if (nowait == -1)
10981 nowait = omp_find_clause (*list_p,
10982 OMP_CLAUSE_NOWAIT) != NULL_TREE;
10983 if (nowait
10984 && (outer_ctx == NULL
10985 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
10987 error_at (OMP_CLAUSE_LOCATION (c),
10988 "%<task%> reduction modifier on a construct "
10989 "with a %<nowait%> clause");
10990 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10993 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
10995 error_at (OMP_CLAUSE_LOCATION (c),
10996 "invalid %<task%> reduction modifier on construct "
10997 "other than %<parallel%>, %qs, %<sections%> or "
10998 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10999 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
11002 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11003 switch (code)
11005 case OMP_SECTIONS:
11006 error_at (OMP_CLAUSE_LOCATION (c),
11007 "%<inscan%> %<reduction%> clause on "
11008 "%qs construct", "sections");
11009 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11010 break;
11011 case OMP_PARALLEL:
11012 error_at (OMP_CLAUSE_LOCATION (c),
11013 "%<inscan%> %<reduction%> clause on "
11014 "%qs construct", "parallel");
11015 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11016 break;
11017 case OMP_TEAMS:
11018 error_at (OMP_CLAUSE_LOCATION (c),
11019 "%<inscan%> %<reduction%> clause on "
11020 "%qs construct", "teams");
11021 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11022 break;
11023 case OMP_TASKLOOP:
11024 error_at (OMP_CLAUSE_LOCATION (c),
11025 "%<inscan%> %<reduction%> clause on "
11026 "%qs construct", "taskloop");
11027 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11028 break;
11029 case OMP_SCOPE:
11030 error_at (OMP_CLAUSE_LOCATION (c),
11031 "%<inscan%> %<reduction%> clause on "
11032 "%qs construct", "scope");
11033 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
11034 break;
11035 default:
11036 break;
11038 /* FALLTHRU */
11039 case OMP_CLAUSE_IN_REDUCTION:
11040 case OMP_CLAUSE_TASK_REDUCTION:
11041 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
11042 /* OpenACC permits reductions on private variables. */
11043 if (!(region_type & ORT_ACC)
11044 /* taskgroup is actually not a worksharing region. */
11045 && code != OMP_TASKGROUP)
11046 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
11047 decl = OMP_CLAUSE_DECL (c);
11048 if (TREE_CODE (decl) == MEM_REF)
11050 tree type = TREE_TYPE (decl);
11051 bool saved_into_ssa = gimplify_ctxp->into_ssa;
11052 gimplify_ctxp->into_ssa = false;
11053 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
11054 NULL, is_gimple_val, fb_rvalue, false)
11055 == GS_ERROR)
11057 gimplify_ctxp->into_ssa = saved_into_ssa;
11058 remove = true;
11059 break;
11061 gimplify_ctxp->into_ssa = saved_into_ssa;
11062 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11063 if (DECL_P (v))
11065 omp_firstprivatize_variable (ctx, v);
11066 omp_notice_variable (ctx, v, true);
11068 decl = TREE_OPERAND (decl, 0);
11069 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11071 gimplify_ctxp->into_ssa = false;
11072 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
11073 NULL, is_gimple_val, fb_rvalue, false)
11074 == GS_ERROR)
11076 gimplify_ctxp->into_ssa = saved_into_ssa;
11077 remove = true;
11078 break;
11080 gimplify_ctxp->into_ssa = saved_into_ssa;
11081 v = TREE_OPERAND (decl, 1);
11082 if (DECL_P (v))
11084 omp_firstprivatize_variable (ctx, v);
11085 omp_notice_variable (ctx, v, true);
11087 decl = TREE_OPERAND (decl, 0);
11089 if (TREE_CODE (decl) == ADDR_EXPR
11090 || TREE_CODE (decl) == INDIRECT_REF)
11091 decl = TREE_OPERAND (decl, 0);
11093 goto do_add_decl;
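/* The MEM_REF handling above covers reductions over array sections,
   e.g.

     #pragma omp parallel for reduction (+: a[0:n])

   where OMP_CLAUSE_DECL is a MEM_REF whose domain bound and pointer
   offset are gimplified (outside of SSA form) and firstprivatized so
   the construct can compute the section length.  */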
11094 case OMP_CLAUSE_LINEAR:
11095 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
11096 is_gimple_val, fb_rvalue) == GS_ERROR)
11098 remove = true;
11099 break;
11101 else
11103 if (code == OMP_SIMD
11104 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11106 struct gimplify_omp_ctx *octx = outer_ctx;
11107 if (octx
11108 && octx->region_type == ORT_WORKSHARE
11109 && octx->combined_loop
11110 && !octx->distribute)
11112 if (octx->outer_context
11113 && (octx->outer_context->region_type
11114 == ORT_COMBINED_PARALLEL))
11115 octx = octx->outer_context->outer_context;
11116 else
11117 octx = octx->outer_context;
11119 if (octx
11120 && octx->region_type == ORT_WORKSHARE
11121 && octx->combined_loop
11122 && octx->distribute)
11124 error_at (OMP_CLAUSE_LOCATION (c),
11125 "%<linear%> clause for variable other than "
11126 "loop iterator specified on construct "
11127 "combined with %<distribute%>");
11128 remove = true;
11129 break;
11132 /* For combined #pragma omp parallel for simd, we need to put
11133 lastprivate and perhaps firstprivate too on the
11134 parallel.  Similarly for #pragma omp for simd.  */

11135 struct gimplify_omp_ctx *octx = outer_ctx;
11136 bool taskloop_seen = false;
11137 decl = NULL_TREE;
11140 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11141 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11142 break;
11143 decl = OMP_CLAUSE_DECL (c);
11144 if (error_operand_p (decl))
11146 decl = NULL_TREE;
11147 break;
11149 flags = GOVD_SEEN;
11150 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11151 flags |= GOVD_FIRSTPRIVATE;
11152 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11153 flags |= GOVD_LASTPRIVATE;
11154 if (octx
11155 && octx->region_type == ORT_WORKSHARE
11156 && octx->combined_loop)
11158 if (octx->outer_context
11159 && (octx->outer_context->region_type
11160 == ORT_COMBINED_PARALLEL))
11161 octx = octx->outer_context;
11162 else if (omp_check_private (octx, decl, false))
11163 break;
11165 else if (octx
11166 && (octx->region_type & ORT_TASK) != 0
11167 && octx->combined_loop)
11168 taskloop_seen = true;
11169 else if (octx
11170 && octx->region_type == ORT_COMBINED_PARALLEL
11171 && ((ctx->region_type == ORT_WORKSHARE
11172 && octx == outer_ctx)
11173 || taskloop_seen))
11174 flags = GOVD_SEEN | GOVD_SHARED;
11175 else if (octx
11176 && ((octx->region_type & ORT_COMBINED_TEAMS)
11177 == ORT_COMBINED_TEAMS))
11178 flags = GOVD_SEEN | GOVD_SHARED;
11179 else if (octx
11180 && octx->region_type == ORT_COMBINED_TARGET)
11182 if (flags & GOVD_LASTPRIVATE)
11183 flags = GOVD_SEEN | GOVD_MAP;
11185 else
11186 break;
11187 splay_tree_node on
11188 = splay_tree_lookup (octx->variables,
11189 (splay_tree_key) decl);
11190 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11192 octx = NULL;
11193 break;
11195 omp_add_variable (octx, decl, flags);
11196 if (octx->outer_context == NULL)
11197 break;
11198 octx = octx->outer_context;
11200 while (1);
11201 if (octx
11202 && decl
11203 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11204 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11205 omp_notice_variable (octx, decl, true);
11207 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11208 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11209 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11211 notice_outer = false;
11212 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11214 goto do_add;
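/* E.g. for

     #pragma omp parallel for simd linear (x: 1)

   the loops above also register X on the enclosing constructs of the
   combined directive (as shared, or as firstprivate/lastprivate, as
   appropriate), since a linear variable on the simd must be copied
   in and out through those outer constructs.  */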
11216 case OMP_CLAUSE_MAP:
11217 decl = OMP_CLAUSE_DECL (c);
11218 if (error_operand_p (decl))
11219 remove = true;
11220 switch (code)
11222 case OMP_TARGET:
11223 break;
11224 case OACC_DATA:
11225 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11226 break;
11227 /* FALLTHRU */
11228 case OMP_TARGET_DATA:
11229 case OMP_TARGET_ENTER_DATA:
11230 case OMP_TARGET_EXIT_DATA:
11231 case OACC_ENTER_DATA:
11232 case OACC_EXIT_DATA:
11233 case OACC_HOST_DATA:
11234 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11235 || (OMP_CLAUSE_MAP_KIND (c)
11236 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11237 /* For target {,enter ,exit }data only the array slice is
11238 mapped, but not the pointer to it. */
11239 remove = true;
11240 break;
11241 default:
11242 break;
11244 if (remove)
11245 break;
11246 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11248 struct gimplify_omp_ctx *octx;
11249 for (octx = outer_ctx; octx; octx = octx->outer_context)
11251 if (octx->region_type != ORT_ACC_HOST_DATA)
11252 break;
11253 splay_tree_node n2
11254 = splay_tree_lookup (octx->variables,
11255 (splay_tree_key) decl);
11256 if (n2)
11257 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11258 "declared in enclosing %<host_data%> region",
11259 DECL_NAME (decl));
11262 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11263 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11264 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11265 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11266 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11268 remove = true;
11269 break;
11271 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11272 || (OMP_CLAUSE_MAP_KIND (c)
11273 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11274 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11275 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11277 OMP_CLAUSE_SIZE (c)
11278 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11279 false);
11280 if ((region_type & ORT_TARGET) != 0)
11281 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11282 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11285 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11287 tree base = omp_strip_components_and_deref (decl);
11288 if (DECL_P (base))
11290 decl = base;
11291 splay_tree_node n
11292 = splay_tree_lookup (ctx->variables,
11293 (splay_tree_key) decl);
11294 if (seen_error ()
11295 && n
11296 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11298 remove = true;
11299 break;
11301 flags = GOVD_MAP | GOVD_EXPLICIT;
11303 goto do_add_decl;
11307 if (TREE_CODE (decl) == TARGET_EXPR)
11309 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11310 is_gimple_lvalue, fb_lvalue)
11311 == GS_ERROR)
11312 remove = true;
11314 else if (!DECL_P (decl))
11316 tree d = decl, *pd;
11317 if (TREE_CODE (d) == ARRAY_REF)
11319 while (TREE_CODE (d) == ARRAY_REF)
11320 d = TREE_OPERAND (d, 0);
11321 if (TREE_CODE (d) == COMPONENT_REF
11322 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11323 decl = d;
11325 pd = &OMP_CLAUSE_DECL (c);
11326 if (d == decl
11327 && TREE_CODE (decl) == INDIRECT_REF
11328 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11329 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11330 == REFERENCE_TYPE)
11331 && (OMP_CLAUSE_MAP_KIND (c)
11332 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11334 pd = &TREE_OPERAND (decl, 0);
11335 decl = TREE_OPERAND (decl, 0);
11337 /* An "attach/detach" operation on an update directive should
11338 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11339 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11340 depends on the previous mapping. */
11341 if (code == OACC_UPDATE
11342 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11343 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11345 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11347 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11348 == ARRAY_TYPE)
11349 remove = true;
11350 else
11352 gomp_map_kind k = ((code == OACC_EXIT_DATA
11353 || code == OMP_TARGET_EXIT_DATA)
11354 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11355 OMP_CLAUSE_SET_MAP_KIND (c, k);
11359 tree cref = decl;
11361 while (TREE_CODE (cref) == ARRAY_REF)
11362 cref = TREE_OPERAND (cref, 0);
11364 if (TREE_CODE (cref) == INDIRECT_REF)
11365 cref = TREE_OPERAND (cref, 0);
11367 if (TREE_CODE (cref) == COMPONENT_REF)
11369 tree base = cref;
11370 while (base && !DECL_P (base))
11372 tree innerbase = omp_get_base_pointer (base);
11373 if (!innerbase)
11374 break;
11375 base = innerbase;
11377 if (base
11378 && DECL_P (base)
11379 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11380 && POINTER_TYPE_P (TREE_TYPE (base)))
11382 splay_tree_node n
11383 = splay_tree_lookup (ctx->variables,
11384 (splay_tree_key) base);
11385 n->value |= GOVD_SEEN;
11389 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11391 /* Don't gimplify *pd fully at this point, as the base
11392 will need to be adjusted during omp lowering. */
11393 auto_vec<tree, 10> expr_stack;
11394 tree *p = pd;
11395 while (handled_component_p (*p)
11396 || TREE_CODE (*p) == INDIRECT_REF
11397 || TREE_CODE (*p) == ADDR_EXPR
11398 || TREE_CODE (*p) == MEM_REF
11399 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11401 expr_stack.safe_push (*p);
11402 p = &TREE_OPERAND (*p, 0);
11404 for (int i = expr_stack.length () - 1; i >= 0; i--)
11406 tree t = expr_stack[i];
11407 if (TREE_CODE (t) == ARRAY_REF
11408 || TREE_CODE (t) == ARRAY_RANGE_REF)
11410 if (TREE_OPERAND (t, 2) == NULL_TREE)
11412 tree low = unshare_expr (array_ref_low_bound (t));
11413 if (!is_gimple_min_invariant (low))
11415 TREE_OPERAND (t, 2) = low;
11416 if (gimplify_expr (&TREE_OPERAND (t, 2),
11417 pre_p, NULL,
11418 is_gimple_reg,
11419 fb_rvalue) == GS_ERROR)
11420 remove = true;
11423 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11424 NULL, is_gimple_reg,
11425 fb_rvalue) == GS_ERROR)
11426 remove = true;
11427 if (TREE_OPERAND (t, 3) == NULL_TREE)
11429 tree elmt_size = array_ref_element_size (t);
11430 if (!is_gimple_min_invariant (elmt_size))
11432 elmt_size = unshare_expr (elmt_size);
11433 tree elmt_type
11434 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11435 0)));
11436 tree factor
11437 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11438 elmt_size
11439 = size_binop (EXACT_DIV_EXPR, elmt_size,
11440 factor);
11441 TREE_OPERAND (t, 3) = elmt_size;
11442 if (gimplify_expr (&TREE_OPERAND (t, 3),
11443 pre_p, NULL,
11444 is_gimple_reg,
11445 fb_rvalue) == GS_ERROR)
11446 remove = true;
11449 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11450 NULL, is_gimple_reg,
11451 fb_rvalue) == GS_ERROR)
11452 remove = true;
11454 else if (TREE_CODE (t) == COMPONENT_REF)
11456 if (TREE_OPERAND (t, 2) == NULL_TREE)
11458 tree offset = component_ref_field_offset (t);
11459 if (!is_gimple_min_invariant (offset))
11461 offset = unshare_expr (offset);
11462 tree field = TREE_OPERAND (t, 1);
11463 tree factor
11464 = size_int (DECL_OFFSET_ALIGN (field)
11465 / BITS_PER_UNIT);
11466 offset = size_binop (EXACT_DIV_EXPR, offset,
11467 factor);
11468 TREE_OPERAND (t, 2) = offset;
11469 if (gimplify_expr (&TREE_OPERAND (t, 2),
11470 pre_p, NULL,
11471 is_gimple_reg,
11472 fb_rvalue) == GS_ERROR)
11473 remove = true;
11476 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11477 NULL, is_gimple_reg,
11478 fb_rvalue) == GS_ERROR)
11479 remove = true;
11482 for (; expr_stack.length () > 0; )
11484 tree t = expr_stack.pop ();
11486 if (TREE_CODE (t) == ARRAY_REF
11487 || TREE_CODE (t) == ARRAY_RANGE_REF)
11489 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11490 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11491 NULL, is_gimple_val,
11492 fb_rvalue) == GS_ERROR)
11493 remove = true;
11497 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11498 fb_lvalue) == GS_ERROR)
11500 remove = true;
11501 break;
11503 break;
11505 flags = GOVD_MAP | GOVD_EXPLICIT;
11506 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11507 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11508 flags |= GOVD_MAP_ALWAYS_TO;
11510 if ((code == OMP_TARGET
11511 || code == OMP_TARGET_DATA
11512 || code == OMP_TARGET_ENTER_DATA
11513 || code == OMP_TARGET_EXIT_DATA)
11514 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11516 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11517 octx = octx->outer_context)
11519 splay_tree_node n
11520 = splay_tree_lookup (octx->variables,
11521 (splay_tree_key) OMP_CLAUSE_DECL (c));
11522 /* If this is contained in an outer OpenMP region as a
11523 firstprivate value, remove the attach/detach. */
11524 if (n && (n->value & GOVD_FIRSTPRIVATE))
11526 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11527 goto do_add;
11531 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11532 ? GOMP_MAP_DETACH
11533 : GOMP_MAP_ATTACH);
11534 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11537 goto do_add;
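/* To illustrate the attach/detach handling above: a pointer-based
   mapping such as

     #pragma omp target enter data map (to: p->a[0:n])

   produces a GOMP_MAP_ATTACH_DETACH node for the pointer, which is
   rewritten here to GOMP_MAP_ATTACH (or GOMP_MAP_DETACH for exit
   data), or to GOMP_MAP_FIRSTPRIVATE_POINTER if the base pointer is
   already firstprivate in an enclosing OpenMP region.  */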
11539 case OMP_CLAUSE_AFFINITY:
11540 gimplify_omp_affinity (list_p, pre_p);
11541 remove = true;
11542 break;
11543 case OMP_CLAUSE_DOACROSS:
11544 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11546 tree deps = OMP_CLAUSE_DECL (c);
11547 while (deps && TREE_CODE (deps) == TREE_LIST)
11549 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11550 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11551 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11552 pre_p, NULL, is_gimple_val, fb_rvalue);
11553 deps = TREE_CHAIN (deps);
11556 else
11557 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11558 == OMP_CLAUSE_DOACROSS_SOURCE);
11559 break;
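/* E.g. a dependence vector from

     #pragma omp ordered doacross (sink: i - 1, j)

   stays a TREE_LIST; only variable divisors stored in its entries
   (used for non-unit loop steps) are gimplified above, while
   doacross (source:) carries no operands at all.  */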
11560 case OMP_CLAUSE_DEPEND:
11561 if (handled_depend_iterators == -1)
11562 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11563 if (handled_depend_iterators)
11565 if (handled_depend_iterators == 2)
11566 remove = true;
11567 break;
11569 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11571 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11572 NULL, is_gimple_val, fb_rvalue);
11573 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11575 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11577 remove = true;
11578 break;
11580 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11582 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11583 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11584 is_gimple_val, fb_rvalue) == GS_ERROR)
11586 remove = true;
11587 break;
11590 if (code == OMP_TASK)
11591 ctx->has_depend = true;
11592 break;
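/* E.g.

     #pragma omp task depend (inout: x)

   ends up with &x as OMP_CLAUSE_DECL, gimplified to a value above;
   a null_pointer_node decl (the omp_all_memory case) is left as
   is.  */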
11594 case OMP_CLAUSE_TO:
11595 case OMP_CLAUSE_FROM:
11596 case OMP_CLAUSE__CACHE_:
11597 decl = OMP_CLAUSE_DECL (c);
11598 if (error_operand_p (decl))
11600 remove = true;
11601 break;
11603 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11604 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11605 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11606 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11607 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11609 remove = true;
11610 break;
11612 if (!DECL_P (decl))
11614 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11615 NULL, is_gimple_lvalue, fb_lvalue)
11616 == GS_ERROR)
11618 remove = true;
11619 break;
11621 break;
11623 goto do_notice;
11625 case OMP_CLAUSE_USE_DEVICE_PTR:
11626 case OMP_CLAUSE_USE_DEVICE_ADDR:
11627 flags = GOVD_EXPLICIT;
11628 goto do_add;
11630 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11631 decl = OMP_CLAUSE_DECL (c);
11632 while (TREE_CODE (decl) == INDIRECT_REF
11633 || TREE_CODE (decl) == ARRAY_REF)
11634 decl = TREE_OPERAND (decl, 0);
11635 flags = GOVD_EXPLICIT;
11636 goto do_add_decl;
11638 case OMP_CLAUSE_IS_DEVICE_PTR:
11639 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11640 goto do_add;
11642 do_add:
11643 decl = OMP_CLAUSE_DECL (c);
11644 do_add_decl:
11645 if (error_operand_p (decl))
11647 remove = true;
11648 break;
11650 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11652 tree t = omp_member_access_dummy_var (decl);
11653 if (t)
11655 tree v = DECL_VALUE_EXPR (decl);
11656 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11657 if (outer_ctx)
11658 omp_notice_variable (outer_ctx, t, true);
11661 if (code == OACC_DATA
11662 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11663 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11664 flags |= GOVD_MAP_0LEN_ARRAY;
11665 omp_add_variable (ctx, decl, flags);
11666 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11667 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11668 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11669 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11671 struct gimplify_omp_ctx *pctx
11672 = code == OMP_TARGET ? outer_ctx : ctx;
11673 if (pctx)
11674 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11675 GOVD_LOCAL | GOVD_SEEN);
11676 if (pctx
11677 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11678 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11679 find_decl_expr,
11680 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11681 NULL) == NULL_TREE)
11682 omp_add_variable (pctx,
11683 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11684 GOVD_LOCAL | GOVD_SEEN);
11685 gimplify_omp_ctxp = pctx;
11686 push_gimplify_context ();
11688 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11689 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11691 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11692 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11693 pop_gimplify_context
11694 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11695 push_gimplify_context ();
11696 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11697 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11698 pop_gimplify_context
11699 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11700 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11701 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11703 gimplify_omp_ctxp = outer_ctx;
11705 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11706 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11708 gimplify_omp_ctxp = ctx;
11709 push_gimplify_context ();
11710 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11712 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11713 NULL, NULL);
11714 TREE_SIDE_EFFECTS (bind) = 1;
11715 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11716 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11718 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11719 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11720 pop_gimplify_context
11721 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11722 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11724 gimplify_omp_ctxp = outer_ctx;
11726 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11727 && OMP_CLAUSE_LINEAR_STMT (c))
11729 gimplify_omp_ctxp = ctx;
11730 push_gimplify_context ();
11731 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11733 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11734 NULL, NULL);
11735 TREE_SIDE_EFFECTS (bind) = 1;
11736 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11737 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11739 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11740 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11741 pop_gimplify_context
11742 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11743 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11745 gimplify_omp_ctxp = outer_ctx;
11747 if (notice_outer)
11748 goto do_notice;
11749 break;
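/* The placeholder handling above is what lowers user-defined
   reductions: for e.g.

     #pragma omp declare reduction (mrg : T : omp_out.merge (omp_in))

   the initializer and combiner trees stored in
   OMP_CLAUSE_REDUCTION_{INIT,MERGE} are gimplified here into the
   OMP_CLAUSE_REDUCTION_GIMPLE_{INIT,MERGE} sequences, with the
   omp_out/omp_in placeholders registered as local vars.  */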
11751 case OMP_CLAUSE_COPYIN:
11752 case OMP_CLAUSE_COPYPRIVATE:
11753 decl = OMP_CLAUSE_DECL (c);
11754 if (error_operand_p (decl))
11756 remove = true;
11757 break;
11759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11760 && !remove
11761 && !omp_check_private (ctx, decl, true))
11763 remove = true;
11764 if (is_global_var (decl))
11766 if (DECL_THREAD_LOCAL_P (decl))
11767 remove = false;
11768 else if (DECL_HAS_VALUE_EXPR_P (decl))
11770 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11772 if (value
11773 && DECL_P (value)
11774 && DECL_THREAD_LOCAL_P (value))
11775 remove = false;
11778 if (remove)
11779 error_at (OMP_CLAUSE_LOCATION (c),
11780 "copyprivate variable %qE is not threadprivate"
11781 " or private in outer context", DECL_NAME (decl));
11783 do_notice:
11784 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11785 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11786 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11787 && outer_ctx
11788 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11789 || (region_type == ORT_WORKSHARE
11790 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11791 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11792 || code == OMP_LOOP)))
11793 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11794 || (code == OMP_LOOP
11795 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11796 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11797 == ORT_COMBINED_TEAMS))))
11799 splay_tree_node on
11800 = splay_tree_lookup (outer_ctx->variables,
11801 (splay_tree_key)decl);
11802 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11804 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11805 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11806 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11807 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11808 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11809 == POINTER_TYPE))))
11810 omp_firstprivatize_variable (outer_ctx, decl);
11811 else
11813 omp_add_variable (outer_ctx, decl,
11814 GOVD_SEEN | GOVD_SHARED);
11815 if (outer_ctx->outer_context)
11816 omp_notice_variable (outer_ctx->outer_context, decl,
11817 true);
11821 if (outer_ctx)
11822 omp_notice_variable (outer_ctx, decl, true);
11823 if (check_non_private
11824 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11825 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11826 || decl == OMP_CLAUSE_DECL (c)
11827 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11828 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11829 == ADDR_EXPR
11830 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11831 == POINTER_PLUS_EXPR
11832 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11833 (OMP_CLAUSE_DECL (c), 0), 0))
11834 == ADDR_EXPR)))))
11835 && omp_check_private (ctx, decl, false))
11837 error ("%s variable %qE is private in outer context",
11838 check_non_private, DECL_NAME (decl));
11839 remove = true;
11841 break;
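/* The check_non_private diagnostic above rejects e.g.

     #pragma omp parallel private (x)
     ...
     #pragma omp for firstprivate (x)

   where the worksharing construct would have nothing to copy from,
   because X is already private in the outer context.  */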
11843 case OMP_CLAUSE_DETACH:
11844 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11845 goto do_add;
11847 case OMP_CLAUSE_IF:
11848 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11849 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11851 const char *p[2];
11852 for (int i = 0; i < 2; i++)
11853 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11855 case VOID_CST: p[i] = "cancel"; break;
11856 case OMP_PARALLEL: p[i] = "parallel"; break;
11857 case OMP_SIMD: p[i] = "simd"; break;
11858 case OMP_TASK: p[i] = "task"; break;
11859 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11860 case OMP_TARGET_DATA: p[i] = "target data"; break;
11861 case OMP_TARGET: p[i] = "target"; break;
11862 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11863 case OMP_TARGET_ENTER_DATA:
11864 p[i] = "target enter data"; break;
11865 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11866 default: gcc_unreachable ();
11868 error_at (OMP_CLAUSE_LOCATION (c),
11869 "expected %qs %<if%> clause modifier rather than %qs",
11870 p[0], p[1]);
11871 remove = true;
11873 /* Fall through. */
11875 case OMP_CLAUSE_FINAL:
11876 OMP_CLAUSE_OPERAND (c, 0)
11877 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11878 /* Fall through. */
11880 case OMP_CLAUSE_NUM_TEAMS:
11881 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11882 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11883 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11885 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11887 remove = true;
11888 break;
11890 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11891 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11892 pre_p, NULL, true);
11894 /* Fall through. */
11896 case OMP_CLAUSE_SCHEDULE:
11897 case OMP_CLAUSE_NUM_THREADS:
11898 case OMP_CLAUSE_THREAD_LIMIT:
11899 case OMP_CLAUSE_DIST_SCHEDULE:
11900 case OMP_CLAUSE_DEVICE:
11901 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11902 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11904 if (code != OMP_TARGET)
11906 error_at (OMP_CLAUSE_LOCATION (c),
11907 "%<device%> clause with %<ancestor%> is only "
11908 "allowed on %<target%> construct");
11909 remove = true;
11910 break;
11913 tree clauses = *orig_list_p;
11914 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11915 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11916 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11917 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11918 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11919 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
11922 error_at (OMP_CLAUSE_LOCATION (c),
11923 "with %<ancestor%>, only the %<device%>, "
11924 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11925 "and %<map%> clauses may appear on the "
11926 "construct");
11927 remove = true;
11928 break;
11931 /* Fall through. */
11933 case OMP_CLAUSE_PRIORITY:
11934 case OMP_CLAUSE_GRAINSIZE:
11935 case OMP_CLAUSE_NUM_TASKS:
11936 case OMP_CLAUSE_FILTER:
11937 case OMP_CLAUSE_HINT:
11938 case OMP_CLAUSE_ASYNC:
11939 case OMP_CLAUSE_WAIT:
11940 case OMP_CLAUSE_NUM_GANGS:
11941 case OMP_CLAUSE_NUM_WORKERS:
11942 case OMP_CLAUSE_VECTOR_LENGTH:
11943 case OMP_CLAUSE_WORKER:
11944 case OMP_CLAUSE_VECTOR:
11945 if (OMP_CLAUSE_OPERAND (c, 0)
11946 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
11948 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
11950 remove = true;
11951 break;
11953 /* All these clauses care about the value, not a particular decl,
11954 so try to force it into an SSA_NAME or a fresh temporary. */
11955 OMP_CLAUSE_OPERAND (c, 0)
11956 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
11957 pre_p, NULL, true);
11959 break;
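/* E.g. num_threads (n * 4) is evaluated once here into an SSA name
   or temporary, so the expression cannot be re-evaluated with a
   different value when the construct is expanded later.  */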
11961 case OMP_CLAUSE_GANG:
11962 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
11963 is_gimple_val, fb_rvalue) == GS_ERROR)
11964 remove = true;
11965 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
11966 is_gimple_val, fb_rvalue) == GS_ERROR)
11967 remove = true;
11968 break;
11970 case OMP_CLAUSE_NOWAIT:
11971 nowait = 1;
11972 break;
11974 case OMP_CLAUSE_ORDERED:
11975 case OMP_CLAUSE_UNTIED:
11976 case OMP_CLAUSE_COLLAPSE:
11977 case OMP_CLAUSE_TILE:
11978 case OMP_CLAUSE_AUTO:
11979 case OMP_CLAUSE_SEQ:
11980 case OMP_CLAUSE_INDEPENDENT:
11981 case OMP_CLAUSE_MERGEABLE:
11982 case OMP_CLAUSE_PROC_BIND:
11983 case OMP_CLAUSE_SAFELEN:
11984 case OMP_CLAUSE_SIMDLEN:
11985 case OMP_CLAUSE_NOGROUP:
11986 case OMP_CLAUSE_THREADS:
11987 case OMP_CLAUSE_SIMD:
11988 case OMP_CLAUSE_BIND:
11989 case OMP_CLAUSE_IF_PRESENT:
11990 case OMP_CLAUSE_FINALIZE:
11991 break;
11993 case OMP_CLAUSE_ORDER:
11994 ctx->order_concurrent = true;
11995 break;
11997 case OMP_CLAUSE_DEFAULTMAP:
11998 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
11999 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
12001 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
12002 gdmkmin = GDMK_SCALAR;
12003 gdmkmax = GDMK_POINTER;
12004 break;
12005 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
12006 gdmkmin = GDMK_SCALAR;
12007 gdmkmax = GDMK_SCALAR_TARGET;
12008 break;
12009 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
12010 gdmkmin = gdmkmax = GDMK_AGGREGATE;
12011 break;
12012 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
12013 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
12014 break;
12015 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
12016 gdmkmin = gdmkmax = GDMK_POINTER;
12017 break;
12018 default:
12019 gcc_unreachable ();
12021 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
12022 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
12024 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
12025 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
12026 break;
12027 case OMP_CLAUSE_DEFAULTMAP_TO:
12028 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
12029 break;
12030 case OMP_CLAUSE_DEFAULTMAP_FROM:
12031 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
12032 break;
12033 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
12034 ctx->defaultmap[gdmk] = GOVD_MAP;
12035 break;
12036 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
12037 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12038 break;
12039 case OMP_CLAUSE_DEFAULTMAP_NONE:
12040 ctx->defaultmap[gdmk] = 0;
12041 break;
12042 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
12043 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
12044 break;
12045 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
12046 switch (gdmk)
12048 case GDMK_SCALAR:
12049 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12050 break;
12051 case GDMK_SCALAR_TARGET:
12052 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
12053 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
12054 break;
12055 case GDMK_AGGREGATE:
12056 case GDMK_ALLOCATABLE:
12057 ctx->defaultmap[gdmk] = GOVD_MAP;
12058 break;
12059 case GDMK_POINTER:
12060 ctx->defaultmap[gdmk] = GOVD_MAP;
12061 if (!lang_GNU_Fortran ())
12062 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
12063 break;
12064 default:
12065 gcc_unreachable ();
12067 break;
12068 default:
12069 gcc_unreachable ();
12071 break;
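/* E.g. defaultmap (firstprivate: scalar) sets
   ctx->defaultmap[GDMK_SCALAR] to GOVD_FIRSTPRIVATE, while the
   spec-mandated defaults (the OMP_CLAUSE_DEFAULTMAP_DEFAULT case)
   are firstprivate for scalars and tofrom maps for aggregates,
   allocatables and pointers, with Fortran differing for
   target-scalars and for zero-length pointer sections as above.  */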
12073 case OMP_CLAUSE_ALIGNED:
12074 decl = OMP_CLAUSE_DECL (c);
12075 if (error_operand_p (decl))
12077 remove = true;
12078 break;
12080 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12081 is_gimple_val, fb_rvalue) == GS_ERROR)
12083 remove = true;
12084 break;
12086 if (!is_global_var (decl)
12087 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12088 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12089 break;
12091 case OMP_CLAUSE_NONTEMPORAL:
12092 decl = OMP_CLAUSE_DECL (c);
12093 if (error_operand_p (decl))
12095 remove = true;
12096 break;
12098 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12099 break;
12101 case OMP_CLAUSE_ALLOCATE:
12102 decl = OMP_CLAUSE_DECL (c);
12103 if (error_operand_p (decl))
12105 remove = true;
12106 break;
12108 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12109 is_gimple_val, fb_rvalue) == GS_ERROR)
12111 remove = true;
12112 break;
12114 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12115 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12116 == INTEGER_CST))
12118 else if (code == OMP_TASKLOOP
12119 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12120 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12121 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12122 pre_p, NULL, false);
12123 break;
12125 case OMP_CLAUSE_DEFAULT:
12126 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12127 break;
12129 case OMP_CLAUSE_INCLUSIVE:
12130 case OMP_CLAUSE_EXCLUSIVE:
12131 decl = OMP_CLAUSE_DECL (c);
12133 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12134 (splay_tree_key) decl);
12135 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12137 error_at (OMP_CLAUSE_LOCATION (c),
12138 "%qD specified in %qs clause but not in %<inscan%> "
12139 "%<reduction%> clause on the containing construct",
12140 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12141 remove = true;
12143 else
12145 n->value |= GOVD_REDUCTION_INSCAN;
12146 if (outer_ctx->region_type == ORT_SIMD
12147 && outer_ctx->outer_context
12148 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12150 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12151 (splay_tree_key) decl);
12152 if (n && (n->value & GOVD_REDUCTION) != 0)
12153 n->value |= GOVD_REDUCTION_INSCAN;
12157 break;
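/* E.g.

     #pragma omp simd reduction (inscan, +: r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];
	 #pragma omp scan inclusive (r)
	 b[i] = r;
       }

   is the shape being checked here: the scan clause variable must
   appear in an inscan reduction on the enclosing construct, which is
   then marked GOVD_REDUCTION_INSCAN above.  */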
12159 case OMP_CLAUSE_NOHOST:
12160 default:
12161 gcc_unreachable ();
12164 if (code == OACC_DATA
12165 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12166 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12167 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12168 remove = true;
12169 if (remove)
12170 *list_p = OMP_CLAUSE_CHAIN (c);
12171 else
12172 list_p = &OMP_CLAUSE_CHAIN (c);
12175 ctx->clauses = *orig_list_p;
12176 gimplify_omp_ctxp = ctx;
12179 /* Return true if DECL is a candidate for the shared-to-firstprivate
12180 optimization. We only consider non-addressable scalars that are
12181 not too big and are not references. */
12183 static bool
12184 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12186 if (TREE_ADDRESSABLE (decl))
12187 return false;
12188 tree type = TREE_TYPE (decl);
12189 if (!is_gimple_reg_type (type)
12190 || TREE_CODE (type) == REFERENCE_TYPE
12191 || TREE_ADDRESSABLE (type))
12192 return false;
12193 /* Don't optimize overly large decls, as each thread/task will have
12194 its own copy. */
12195 HOST_WIDE_INT len = int_size_in_bytes (type);
12196 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12197 return false;
12198 if (omp_privatize_by_reference (decl))
12199 return false;
12200 return true;
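/* E.g. on a 64-bit target with POINTER_SIZE == 64 and BITS_PER_UNIT
   == 8, the cut-off above is 4 * 64 / 8 == 32 bytes, so an int or a
   small struct qualifies but a 64-byte aggregate does not.  */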
12203 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12204 For a DECL that satisfies omp_shared_to_firstprivate_optimizable_decl_p,
12205 mark it as GOVD_WRITTEN in outer contexts. */
12207 static void
12208 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12210 for (; ctx; ctx = ctx->outer_context)
12212 splay_tree_node n = splay_tree_lookup (ctx->variables,
12213 (splay_tree_key) decl);
12214 if (n == NULL)
12215 continue;
12216 else if (n->value & GOVD_SHARED)
12218 n->value |= GOVD_WRITTEN;
12219 return;
12221 else if (n->value & GOVD_DATA_SHARE_CLASS)
12222 return;
12226 /* Helper callback for walk_gimple_seq to discover possible stores
12227 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12228 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12229 context. */
12231 static tree
12232 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12234 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12236 *walk_subtrees = 0;
12237 if (!wi->is_lhs)
12238 return NULL_TREE;
12240 tree op = *tp;
12243 if (handled_component_p (op))
12244 op = TREE_OPERAND (op, 0);
12245 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12246 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12247 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12248 else
12249 break;
12251 while (1);
12252 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12253 return NULL_TREE;
12255 omp_mark_stores (gimplify_omp_ctxp, op);
12256 return NULL_TREE;
12259 /* Helper callback for walk_gimple_seq to discover possible stores
12260 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12261 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
12262 context. */
12264 static tree
12265 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12266 bool *handled_ops_p,
12267 struct walk_stmt_info *wi)
12269 gimple *stmt = gsi_stmt (*gsi_p);
12270 switch (gimple_code (stmt))
12272 /* Don't recurse on OpenMP constructs for which
12273 gimplify_adjust_omp_clauses already handled the bodies,
12274 except that gimple_omp_for_pre_body still needs walking. */
12275 case GIMPLE_OMP_FOR:
12276 *handled_ops_p = true;
12277 if (gimple_omp_for_pre_body (stmt))
12278 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12279 omp_find_stores_stmt, omp_find_stores_op, wi);
12280 break;
12281 case GIMPLE_OMP_PARALLEL:
12282 case GIMPLE_OMP_TASK:
12283 case GIMPLE_OMP_SECTIONS:
12284 case GIMPLE_OMP_SINGLE:
12285 case GIMPLE_OMP_SCOPE:
12286 case GIMPLE_OMP_TARGET:
12287 case GIMPLE_OMP_TEAMS:
12288 case GIMPLE_OMP_CRITICAL:
12289 *handled_ops_p = true;
12290 break;
12291 default:
12292 break;
12294 return NULL_TREE;
12297 struct gimplify_adjust_omp_clauses_data
12299 tree *list_p;
12300 gimple_seq *pre_p;
12303 /* For all variables that were not actually used within the context,
12304 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12306 static int
12307 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12309 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12310 gimple_seq *pre_p
12311 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12312 tree decl = (tree) n->key;
12313 unsigned flags = n->value;
12314 enum omp_clause_code code;
12315 tree clause;
12316 bool private_debug;
12318 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12319 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12320 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12321 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12322 return 0;
12323 if ((flags & GOVD_SEEN) == 0)
12324 return 0;
12325 if (flags & GOVD_DEBUG_PRIVATE)
12327 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12328 private_debug = true;
12330 else if (flags & GOVD_MAP)
12331 private_debug = false;
12332 else
12333 private_debug
12334 = lang_hooks.decls.omp_private_debug_clause (decl,
12335 !!(flags & GOVD_SHARED));
12336 if (private_debug)
12337 code = OMP_CLAUSE_PRIVATE;
12338 else if (flags & GOVD_MAP)
12340 code = OMP_CLAUSE_MAP;
12341 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12342 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12344 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12345 return 0;
12347 if (VAR_P (decl)
12348 && DECL_IN_CONSTANT_POOL (decl)
12349 && !lookup_attribute ("omp declare target",
12350 DECL_ATTRIBUTES (decl)))
12352 tree id = get_identifier ("omp declare target");
12353 DECL_ATTRIBUTES (decl)
12354 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12355 varpool_node *node = varpool_node::get (decl);
12356 if (node)
12358 node->offloadable = 1;
12359 if (ENABLE_OFFLOADING)
12360 g->have_offload = true;
12364 else if (flags & GOVD_SHARED)
12366 if (is_global_var (decl))
12368 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12369 while (ctx != NULL)
12371 splay_tree_node on
12372 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12373 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12374 | GOVD_PRIVATE | GOVD_REDUCTION
12375 | GOVD_LINEAR | GOVD_MAP)) != 0)
12376 break;
12377 ctx = ctx->outer_context;
12379 if (ctx == NULL)
12380 return 0;
12382 code = OMP_CLAUSE_SHARED;
12383 /* Don't optimize shared into firstprivate for read-only vars
12384 on tasks with a depend clause; we shouldn't try to copy them
12385 until the dependencies are satisfied. */
12386 if (gimplify_omp_ctxp->has_depend)
12387 flags |= GOVD_WRITTEN;
12389 else if (flags & GOVD_PRIVATE)
12390 code = OMP_CLAUSE_PRIVATE;
12391 else if (flags & GOVD_FIRSTPRIVATE)
12393 code = OMP_CLAUSE_FIRSTPRIVATE;
12394 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12395 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12396 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12398 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12399 "%<target%> construct", decl);
12400 return 0;
12403 else if (flags & GOVD_LASTPRIVATE)
12404 code = OMP_CLAUSE_LASTPRIVATE;
12405 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12406 return 0;
12407 else if (flags & GOVD_CONDTEMP)
12409 code = OMP_CLAUSE__CONDTEMP_;
12410 gimple_add_tmp_var (decl);
12412 else
12413 gcc_unreachable ();
12415 if (((flags & GOVD_LASTPRIVATE)
12416 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12417 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12418 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12420 tree chain = *list_p;
12421 clause = build_omp_clause (input_location, code);
12422 OMP_CLAUSE_DECL (clause) = decl;
12423 OMP_CLAUSE_CHAIN (clause) = chain;
12424 if (private_debug)
12425 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12426 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12427 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12428 else if (code == OMP_CLAUSE_SHARED
12429 && (flags & GOVD_WRITTEN) == 0
12430 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12431 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12432 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12433 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12434 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12436 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12437 OMP_CLAUSE_DECL (nc) = decl;
12438 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12439 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12440 OMP_CLAUSE_DECL (clause)
12441 = build_simple_mem_ref_loc (input_location, decl);
12442 OMP_CLAUSE_DECL (clause)
12443 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12444 build_int_cst (build_pointer_type (char_type_node), 0));
12445 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12446 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12447 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12448 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12449 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12450 OMP_CLAUSE_CHAIN (nc) = chain;
12451 OMP_CLAUSE_CHAIN (clause) = nc;
12452 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12453 gimplify_omp_ctxp = ctx->outer_context;
12454 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12455 pre_p, NULL, is_gimple_val, fb_rvalue);
12456 gimplify_omp_ctxp = ctx;
12458 else if (code == OMP_CLAUSE_MAP)
12460 int kind;
12461 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12462 switch (flags & (GOVD_MAP_TO_ONLY
12463 | GOVD_MAP_FORCE
12464 | GOVD_MAP_FORCE_PRESENT
12465 | GOVD_MAP_ALLOC_ONLY
12466 | GOVD_MAP_FROM_ONLY))
12468 case 0:
12469 kind = GOMP_MAP_TOFROM;
12470 break;
12471 case GOVD_MAP_FORCE:
12472 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12473 break;
12474 case GOVD_MAP_TO_ONLY:
12475 kind = GOMP_MAP_TO;
12476 break;
12477 case GOVD_MAP_FROM_ONLY:
12478 kind = GOMP_MAP_FROM;
12479 break;
12480 case GOVD_MAP_ALLOC_ONLY:
12481 kind = GOMP_MAP_ALLOC;
12482 break;
12483 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12484 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12485 break;
12486 case GOVD_MAP_FORCE_PRESENT:
12487 kind = GOMP_MAP_FORCE_PRESENT;
12488 break;
12489 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
12490 kind = GOMP_MAP_FORCE_PRESENT;
12491 break;
12492 default:
12493 gcc_unreachable ();
12495 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12496 /* Setting of the implicit flag for the runtime is currently disabled for
12497 OpenACC. */
12498 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12499 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12500 if (DECL_SIZE (decl)
12501 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12503 tree decl2 = DECL_VALUE_EXPR (decl);
12504 gcc_assert (INDIRECT_REF_P (decl2));
12505 decl2 = TREE_OPERAND (decl2, 0);
12506 gcc_assert (DECL_P (decl2));
12507 tree mem = build_simple_mem_ref (decl2);
12508 OMP_CLAUSE_DECL (clause) = mem;
12509 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12510 if (gimplify_omp_ctxp->outer_context)
12512 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12513 omp_notice_variable (ctx, decl2, true);
12514 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12516 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12517 OMP_CLAUSE_MAP);
12518 OMP_CLAUSE_DECL (nc) = decl;
12519 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12520 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12521 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12522 else
12523 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12524 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12525 OMP_CLAUSE_CHAIN (clause) = nc;
12527 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12528 && omp_privatize_by_reference (decl))
12530 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12531 OMP_CLAUSE_SIZE (clause)
12532 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12533 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12534 gimplify_omp_ctxp = ctx->outer_context;
12535 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12536 pre_p, NULL, is_gimple_val, fb_rvalue);
12537 gimplify_omp_ctxp = ctx;
12538 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12539 OMP_CLAUSE_MAP);
12540 OMP_CLAUSE_DECL (nc) = decl;
12541 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12542 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12543 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12544 OMP_CLAUSE_CHAIN (clause) = nc;
12546 else
12547 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12549 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12551 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12552 OMP_CLAUSE_DECL (nc) = decl;
12553 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12554 OMP_CLAUSE_CHAIN (nc) = chain;
12555 OMP_CLAUSE_CHAIN (clause) = nc;
12556 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12557 gimplify_omp_ctxp = ctx->outer_context;
12558 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12559 (ctx->region_type & ORT_ACC) != 0);
12560 gimplify_omp_ctxp = ctx;
12562 *list_p = clause;
12563 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12564 gimplify_omp_ctxp = ctx->outer_context;
12565 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12566 in simd. Those are only added for the local vars inside of the simd
12567 body and they don't need to be e.g. default constructible. */
12568 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12569 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12570 (ctx->region_type & ORT_ACC) != 0);
12571 if (gimplify_omp_ctxp)
12572 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12573 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12574 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12575 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12576 true);
12577 gimplify_omp_ctxp = ctx;
12578 return 0;
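/* E.g. for an implicitly mapped pointer P that is only seen as the
   base of a zero-length array section, the GOVD_MAP_0LEN_ARRAY path
   above builds the clause pair

     map (alloc: *p [len: 0]) map (firstprivate_pointer: p)

   (in gimple dump notation), similar to what an explicit p[:0] map
   lowers to.  */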
12581 static void
12582 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12583 enum tree_code code)
12585 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12586 tree *orig_list_p = list_p;
12587 tree c, decl;
12588 bool has_inscan_reductions = false;
12590 if (body)
12592 struct gimplify_omp_ctx *octx;
12593 for (octx = ctx; octx; octx = octx->outer_context)
12594 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12595 break;
12596 if (octx)
12598 struct walk_stmt_info wi;
12599 memset (&wi, 0, sizeof (wi));
12600 walk_gimple_seq (body, omp_find_stores_stmt,
12601 omp_find_stores_op, &wi);
12605 if (ctx->add_safelen1)
12607 /* If there are VLAs in the body of a simd loop, prevent
12608 vectorization. */
12609 gcc_assert (ctx->region_type == ORT_SIMD);
12610 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12611 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12612 OMP_CLAUSE_CHAIN (c) = *list_p;
12613 *list_p = c;
12614 list_p = &OMP_CLAUSE_CHAIN (c);
12617 if (ctx->region_type == ORT_WORKSHARE
12618 && ctx->outer_context
12619 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12621 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12622 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12623 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12625 decl = OMP_CLAUSE_DECL (c);
12626 splay_tree_node n
12627 = splay_tree_lookup (ctx->outer_context->variables,
12628 (splay_tree_key) decl);
12629 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12630 (splay_tree_key) decl));
12631 omp_add_variable (ctx, decl, n->value);
12632 tree c2 = copy_node (c);
12633 OMP_CLAUSE_CHAIN (c2) = *list_p;
12634 *list_p = c2;
12635 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12636 continue;
12637 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12638 OMP_CLAUSE_FIRSTPRIVATE);
12639 OMP_CLAUSE_DECL (c2) = decl;
12640 OMP_CLAUSE_CHAIN (c2) = *list_p;
12641 *list_p = c2;
12645 tree attach_list = NULL_TREE;
12646 tree *attach_tail = &attach_list;
12648 while ((c = *list_p) != NULL)
12650 splay_tree_node n;
12651 bool remove = false;
12652 bool move_attach = false;
12654 switch (OMP_CLAUSE_CODE (c))
12656 case OMP_CLAUSE_FIRSTPRIVATE:
12657 if ((ctx->region_type & ORT_TARGET)
12658 && (ctx->region_type & ORT_ACC) == 0
12659 && TYPE_ATOMIC (strip_array_types
12660 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12662 error_at (OMP_CLAUSE_LOCATION (c),
12663 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12664 "%<target%> construct", OMP_CLAUSE_DECL (c));
12665 remove = true;
12666 break;
12668 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12670 decl = OMP_CLAUSE_DECL (c);
12671 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12672 if ((n->value & GOVD_MAP) != 0)
12674 remove = true;
12675 break;
12677 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12678 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12680 /* FALLTHRU */
12681 case OMP_CLAUSE_PRIVATE:
12682 case OMP_CLAUSE_SHARED:
12683 case OMP_CLAUSE_LINEAR:
12684 decl = OMP_CLAUSE_DECL (c);
12685 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12686 remove = !(n->value & GOVD_SEEN);
12687 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12688 && code == OMP_PARALLEL
12689 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12690 remove = true;
12691 if (! remove)
12693 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12694 if ((n->value & GOVD_DEBUG_PRIVATE)
12695 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12697 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12698 || ((n->value & GOVD_DATA_SHARE_CLASS)
12699 == GOVD_SHARED));
12700 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12701 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12703 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12704 && ctx->has_depend
12705 && DECL_P (decl))
12706 n->value |= GOVD_WRITTEN;
12707 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12708 && (n->value & GOVD_WRITTEN) == 0
12709 && DECL_P (decl)
12710 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12711 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12712 else if (DECL_P (decl)
12713 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12714 && (n->value & GOVD_WRITTEN) != 0)
12715 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12716 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12717 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12718 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12720 else
12721 n->value &= ~GOVD_EXPLICIT;
12722 break;
12724 case OMP_CLAUSE_LASTPRIVATE:
12725 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12726 accurately reflect the presence of a FIRSTPRIVATE clause. */
12727 decl = OMP_CLAUSE_DECL (c);
12728 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12729 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12730 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12731 if (code == OMP_DISTRIBUTE
12732 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12734 remove = true;
12735 error_at (OMP_CLAUSE_LOCATION (c),
12736 "same variable used in %<firstprivate%> and "
12737 "%<lastprivate%> clauses on %<distribute%> "
12738 "construct");
12740 if (!remove
12741 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12742 && DECL_P (decl)
12743 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12744 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12745 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12746 remove = true;
12747 break;
12749 case OMP_CLAUSE_ALIGNED:
12750 decl = OMP_CLAUSE_DECL (c);
12751 if (!is_global_var (decl))
12753 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12754 remove = n == NULL || !(n->value & GOVD_SEEN);
12755 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12757 struct gimplify_omp_ctx *octx;
12758 if (n != NULL
12759 && (n->value & (GOVD_DATA_SHARE_CLASS
12760 & ~GOVD_FIRSTPRIVATE)))
12761 remove = true;
12762 else
12763 for (octx = ctx->outer_context; octx;
12764 octx = octx->outer_context)
12766 n = splay_tree_lookup (octx->variables,
12767 (splay_tree_key) decl);
12768 if (n == NULL)
12769 continue;
12770 if (n->value & GOVD_LOCAL)
12771 break;
12772 /* We have to avoid assigning a shared variable
12773 to itself when trying to add
12774 __builtin_assume_aligned. */
12775 if (n->value & GOVD_SHARED)
12777 remove = true;
12778 break;
12783 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12785 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12786 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12787 remove = true;
12789 break;
12791 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12792 decl = OMP_CLAUSE_DECL (c);
12793 while (INDIRECT_REF_P (decl)
12794 || TREE_CODE (decl) == ARRAY_REF)
12795 decl = TREE_OPERAND (decl, 0);
12796 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12797 remove = n == NULL || !(n->value & GOVD_SEEN);
12798 break;
12800 case OMP_CLAUSE_IS_DEVICE_PTR:
12801 case OMP_CLAUSE_NONTEMPORAL:
12802 decl = OMP_CLAUSE_DECL (c);
12803 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12804 remove = n == NULL || !(n->value & GOVD_SEEN);
12805 break;
12807 case OMP_CLAUSE_MAP:
12808 switch (OMP_CLAUSE_MAP_KIND (c))
12810 case GOMP_MAP_PRESENT_ALLOC:
12811 case GOMP_MAP_PRESENT_TO:
12812 case GOMP_MAP_PRESENT_FROM:
12813 case GOMP_MAP_PRESENT_TOFROM:
12814 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
12815 break;
12816 default:
12817 break;
12819 if (code == OMP_TARGET_EXIT_DATA
12820 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12822 remove = true;
12823 break;
12825 /* If we have a target region, we can push all the attaches to the
12826 end of the list (we may have standalone "attach" operations
12827 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12828 the attachment point AND the pointed-to block have been mapped).
12829 If we have something else, e.g. "enter data", we need to keep
12830 "attach" nodes together with the previous node they attach to so
12831 that separate "exit data" operations work properly (see
12832 libgomp/target.c). */
12833 if ((ctx->region_type & ORT_TARGET) != 0
12834 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12835 || (OMP_CLAUSE_MAP_KIND (c)
12836 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12837 move_attach = true;
12838 decl = OMP_CLAUSE_DECL (c);
12839 /* Data clauses associated with reductions must be
12840 compatible with present_or_copy. Warn and adjust the clause
12841 if that is not the case. */
12842 if (ctx->region_type == ORT_ACC_PARALLEL
12843 || ctx->region_type == ORT_ACC_SERIAL)
12845 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12846 n = NULL;
12848 if (DECL_P (t))
12849 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12851 if (n && (n->value & GOVD_REDUCTION))
12853 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12855 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12856 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12857 && kind != GOMP_MAP_FORCE_PRESENT
12858 && kind != GOMP_MAP_POINTER)
12860 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12861 "incompatible data clause with reduction "
12862 "on %qE; promoting to %<present_or_copy%>",
12863 DECL_NAME (t));
12864 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
12868 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12869 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12871 remove = true;
12872 break;
12874 if (!DECL_P (decl))
12876 if ((ctx->region_type & ORT_TARGET) != 0
12877 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12879 if (INDIRECT_REF_P (decl)
12880 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12881 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12882 == REFERENCE_TYPE))
12883 decl = TREE_OPERAND (decl, 0);
12884 if (TREE_CODE (decl) == COMPONENT_REF)
12886 while (TREE_CODE (decl) == COMPONENT_REF)
12887 decl = TREE_OPERAND (decl, 0);
12888 if (DECL_P (decl))
12890 n = splay_tree_lookup (ctx->variables,
12891 (splay_tree_key) decl);
12892 if (!(n->value & GOVD_SEEN))
12893 remove = true;
12897 break;
12899 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12900 if ((ctx->region_type & ORT_TARGET) != 0
12901 && !(n->value & GOVD_SEEN)
12902 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12903 && (!is_global_var (decl)
12904 || !lookup_attribute ("omp declare target link",
12905 DECL_ATTRIBUTES (decl))))
12907 remove = true;
12908 /* For struct element mapping, if the struct is never referenced
12909 in the target block and none of the mappings has an always
12910 modifier, remove all the struct element mappings, which
12911 immediately follow the GOMP_MAP_STRUCT map clause. */
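/* Hedged sketch (dump syntax approximate): given

     map(struct:s [len: 2]) map(alloc:s.a) map(alloc:s.b)

   with s never referenced in the target block, OMP_CLAUSE_SIZE of the
   GOMP_MAP_STRUCT clause is 2, so the loop below unlinks the two
   element mappings by skipping two nodes in the clause chain.  */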
12912 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12914 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12915 while (cnt--)
12916 OMP_CLAUSE_CHAIN (c)
12917 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
12920 else if (DECL_SIZE (decl)
12921 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
12922 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
12923 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
12924 && (OMP_CLAUSE_MAP_KIND (c)
12925 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12927 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12928 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12929 INTEGER_CST. */
12930 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
12932 tree decl2 = DECL_VALUE_EXPR (decl);
12933 gcc_assert (INDIRECT_REF_P (decl2));
12934 decl2 = TREE_OPERAND (decl2, 0);
12935 gcc_assert (DECL_P (decl2));
12936 tree mem = build_simple_mem_ref (decl2);
12937 OMP_CLAUSE_DECL (c) = mem;
12938 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12939 if (ctx->outer_context)
12941 omp_notice_variable (ctx->outer_context, decl2, true);
12942 omp_notice_variable (ctx->outer_context,
12943 OMP_CLAUSE_SIZE (c), true);
12945 if (((ctx->region_type & ORT_TARGET) != 0
12946 || !ctx->target_firstprivatize_array_bases)
12947 && ((n->value & GOVD_SEEN) == 0
12948 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
12950 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12951 OMP_CLAUSE_MAP);
12952 OMP_CLAUSE_DECL (nc) = decl;
12953 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12954 if (ctx->target_firstprivatize_array_bases)
12955 OMP_CLAUSE_SET_MAP_KIND (nc,
12956 GOMP_MAP_FIRSTPRIVATE_POINTER);
12957 else
12958 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12959 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
12960 OMP_CLAUSE_CHAIN (c) = nc;
12961 c = nc;
12964 else
12966 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12967 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12968 gcc_assert ((n->value & GOVD_SEEN) == 0
12969 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12970 == 0));
12972 break;
12974 case OMP_CLAUSE_TO:
12975 case OMP_CLAUSE_FROM:
12976 case OMP_CLAUSE__CACHE_:
12977 decl = OMP_CLAUSE_DECL (c);
12978 if (!DECL_P (decl))
12979 break;
12980 if (DECL_SIZE (decl)
12981 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12983 tree decl2 = DECL_VALUE_EXPR (decl);
12984 gcc_assert (INDIRECT_REF_P (decl2));
12985 decl2 = TREE_OPERAND (decl2, 0);
12986 gcc_assert (DECL_P (decl2));
12987 tree mem = build_simple_mem_ref (decl2);
12988 OMP_CLAUSE_DECL (c) = mem;
12989 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12990 if (ctx->outer_context)
12992 omp_notice_variable (ctx->outer_context, decl2, true);
12993 omp_notice_variable (ctx->outer_context,
12994 OMP_CLAUSE_SIZE (c), true);
12997 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12998 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12999 break;
13001 case OMP_CLAUSE_REDUCTION:
13002 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
13004 decl = OMP_CLAUSE_DECL (c);
13005 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13006 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
13008 remove = true;
13009 error_at (OMP_CLAUSE_LOCATION (c),
13010 "%qD specified in %<inscan%> %<reduction%> clause "
13011 "but not in %<scan%> directive clause", decl);
13012 break;
13014 has_inscan_reductions = true;
13016 /* FALLTHRU */
13017 case OMP_CLAUSE_IN_REDUCTION:
13018 case OMP_CLAUSE_TASK_REDUCTION:
13019 decl = OMP_CLAUSE_DECL (c);
13020 /* OpenACC reductions need a present_or_copy data clause.
13021 Add one if necessary. Emit an error when the reduction is private. */
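/* Sketch of the two outcomes handled below: for something like
   "#pragma acc parallel reduction(+:x)" with no data clause for x,
   a GOMP_MAP_TOFROM map clause for x is synthesized; for
   "#pragma acc parallel private(x) reduction(+:x)" an error is
   emitted instead.  */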
13022 if (ctx->region_type == ORT_ACC_PARALLEL
13023 || ctx->region_type == ORT_ACC_SERIAL)
13025 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13026 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
13028 remove = true;
13029 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
13030 "reduction on %qE", DECL_NAME (decl));
13032 else if ((n->value & GOVD_MAP) == 0)
13034 tree next = OMP_CLAUSE_CHAIN (c);
13035 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
13036 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
13037 OMP_CLAUSE_DECL (nc) = decl;
13038 OMP_CLAUSE_CHAIN (c) = nc;
13039 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13040 (ctx->region_type
13041 & ORT_ACC) != 0);
13042 while (1)
13044 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
13045 if (OMP_CLAUSE_CHAIN (nc) == NULL)
13046 break;
13047 nc = OMP_CLAUSE_CHAIN (nc);
13049 OMP_CLAUSE_CHAIN (nc) = next;
13050 n->value |= GOVD_MAP;
13053 if (DECL_P (decl)
13054 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13055 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13056 break;
13058 case OMP_CLAUSE_ALLOCATE:
13059 decl = OMP_CLAUSE_DECL (c);
13060 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13061 if (n != NULL && !(n->value & GOVD_SEEN))
13063 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
13064 != 0
13065 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
13066 remove = true;
13068 if (!remove
13069 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13070 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
13071 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
13072 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
13073 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
13075 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13076 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
13077 if (n == NULL)
13079 enum omp_clause_default_kind default_kind
13080 = ctx->default_kind;
13081 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
13082 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13083 true);
13084 ctx->default_kind = default_kind;
13086 else
13087 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13088 true);
13090 break;
13092 case OMP_CLAUSE_COPYIN:
13093 case OMP_CLAUSE_COPYPRIVATE:
13094 case OMP_CLAUSE_IF:
13095 case OMP_CLAUSE_NUM_THREADS:
13096 case OMP_CLAUSE_NUM_TEAMS:
13097 case OMP_CLAUSE_THREAD_LIMIT:
13098 case OMP_CLAUSE_DIST_SCHEDULE:
13099 case OMP_CLAUSE_DEVICE:
13100 case OMP_CLAUSE_SCHEDULE:
13101 case OMP_CLAUSE_NOWAIT:
13102 case OMP_CLAUSE_ORDERED:
13103 case OMP_CLAUSE_DEFAULT:
13104 case OMP_CLAUSE_UNTIED:
13105 case OMP_CLAUSE_COLLAPSE:
13106 case OMP_CLAUSE_FINAL:
13107 case OMP_CLAUSE_MERGEABLE:
13108 case OMP_CLAUSE_PROC_BIND:
13109 case OMP_CLAUSE_SAFELEN:
13110 case OMP_CLAUSE_SIMDLEN:
13111 case OMP_CLAUSE_DEPEND:
13112 case OMP_CLAUSE_DOACROSS:
13113 case OMP_CLAUSE_PRIORITY:
13114 case OMP_CLAUSE_GRAINSIZE:
13115 case OMP_CLAUSE_NUM_TASKS:
13116 case OMP_CLAUSE_NOGROUP:
13117 case OMP_CLAUSE_THREADS:
13118 case OMP_CLAUSE_SIMD:
13119 case OMP_CLAUSE_FILTER:
13120 case OMP_CLAUSE_HINT:
13121 case OMP_CLAUSE_DEFAULTMAP:
13122 case OMP_CLAUSE_ORDER:
13123 case OMP_CLAUSE_BIND:
13124 case OMP_CLAUSE_DETACH:
13125 case OMP_CLAUSE_USE_DEVICE_PTR:
13126 case OMP_CLAUSE_USE_DEVICE_ADDR:
13127 case OMP_CLAUSE_ASYNC:
13128 case OMP_CLAUSE_WAIT:
13129 case OMP_CLAUSE_INDEPENDENT:
13130 case OMP_CLAUSE_NUM_GANGS:
13131 case OMP_CLAUSE_NUM_WORKERS:
13132 case OMP_CLAUSE_VECTOR_LENGTH:
13133 case OMP_CLAUSE_GANG:
13134 case OMP_CLAUSE_WORKER:
13135 case OMP_CLAUSE_VECTOR:
13136 case OMP_CLAUSE_AUTO:
13137 case OMP_CLAUSE_SEQ:
13138 case OMP_CLAUSE_TILE:
13139 case OMP_CLAUSE_IF_PRESENT:
13140 case OMP_CLAUSE_FINALIZE:
13141 case OMP_CLAUSE_INCLUSIVE:
13142 case OMP_CLAUSE_EXCLUSIVE:
13143 break;
13145 case OMP_CLAUSE_NOHOST:
13146 default:
13147 gcc_unreachable ();
13150 if (remove)
13151 *list_p = OMP_CLAUSE_CHAIN (c);
13152 else if (move_attach)
13155 /* Remove the attach node from here; separate it out into its own list. */
13155 *attach_tail = c;
13156 *list_p = OMP_CLAUSE_CHAIN (c);
13157 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13158 attach_tail = &OMP_CLAUSE_CHAIN (c);
13160 else
13161 list_p = &OMP_CLAUSE_CHAIN (c);
13164 /* Splice attach nodes at the end of the list. */
13165 if (attach_list)
13167 *list_p = attach_list;
13168 list_p = attach_tail;
13171 /* Add in any implicit data sharing. */
13172 struct gimplify_adjust_omp_clauses_data data;
13173 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13175 /* OpenMP. Implicit clauses are added at the start of the clause list,
13176 but after any non-map clauses. */
13177 tree *implicit_add_list_p = orig_list_p;
13178 while (*implicit_add_list_p
13179 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13180 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13181 data.list_p = implicit_add_list_p;
13183 else
13184 /* OpenACC. */
13185 data.list_p = list_p;
13186 data.pre_p = pre_p;
13187 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13189 if (has_inscan_reductions)
13190 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13192 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13194 error_at (OMP_CLAUSE_LOCATION (c),
13195 "%<inscan%> %<reduction%> clause used together with "
13196 "%<linear%> clause for a variable other than loop "
13197 "iterator");
13198 break;
13201 gimplify_omp_ctxp = ctx->outer_context;
13202 delete_omp_context (ctx);
13205 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
13206 -1 if not known yet (simd is involved; it won't be known until
13207 vectorization) and 1 if they do. If SCORES is non-NULL, it should
13208 point to an array of at least 2*NCONSTRUCTS+2 ints, which will be
13209 filled with the positions of the CONSTRUCTS (position -1 if one will
13210 never match) followed by the number of constructs in the OpenMP
13211 context construct trait. If the score depends on whether it will be
13212 in a declare simd clone or not, the function returns 2 and there will
13213 be two sets of scores: the first for the case that it is not in a
13214 declare simd clone, the second for the case that it is. */
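/* Worked example of the SCORES layout described above: for
   NCONSTRUCTS == 2 the array needs at least 2*2+2 == 6 ints.  When the
   function returns 2, it holds two sets of
   { position of CONSTRUCTS[0], position of CONSTRUCTS[1],
     number of constructs in the context }:
   first the set for the non-declare-simd-clone case, then the set for
   the declare simd clone case.  */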
13216 int
13217 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13218 int *scores)
13220 int matched = 0, cnt = 0;
13221 bool simd_seen = false;
13222 bool target_seen = false;
13223 int declare_simd_cnt = -1;
13224 auto_vec<enum tree_code, 16> codes;
13225 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13227 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13228 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13229 == ORT_TARGET && ctx->code == OMP_TARGET)
13230 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13231 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13232 || (ctx->region_type == ORT_SIMD
13233 && ctx->code == OMP_SIMD
13234 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13236 ++cnt;
13237 if (scores)
13238 codes.safe_push (ctx->code);
13239 else if (matched < nconstructs && ctx->code == constructs[matched])
13241 if (ctx->code == OMP_SIMD)
13243 if (matched)
13244 return 0;
13245 simd_seen = true;
13247 ++matched;
13249 if (ctx->code == OMP_TARGET)
13251 if (scores == NULL)
13252 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13253 target_seen = true;
13254 break;
13257 else if (ctx->region_type == ORT_WORKSHARE
13258 && ctx->code == OMP_LOOP
13259 && ctx->outer_context
13260 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13261 && ctx->outer_context->outer_context
13262 && ctx->outer_context->outer_context->code == OMP_LOOP
13263 && ctx->outer_context->outer_context->distribute)
13264 ctx = ctx->outer_context->outer_context;
13265 ctx = ctx->outer_context;
13267 if (!target_seen
13268 && lookup_attribute ("omp declare simd",
13269 DECL_ATTRIBUTES (current_function_decl)))
13271 /* Declare simd is a maybe case; it is supposed to be added only to the
13272 clones created by omp-simd-clone.cc and not to the base function. */
13273 declare_simd_cnt = cnt++;
13274 if (scores)
13275 codes.safe_push (OMP_SIMD);
13276 else if (cnt == 0
13277 && constructs[0] == OMP_SIMD)
13279 gcc_assert (matched == 0);
13280 simd_seen = true;
13281 if (++matched == nconstructs)
13282 return -1;
13285 if (tree attr = lookup_attribute ("omp declare variant variant",
13286 DECL_ATTRIBUTES (current_function_decl)))
13288 enum tree_code variant_constructs[5];
13289 int variant_nconstructs = 0;
13290 if (!target_seen)
13291 variant_nconstructs
13292 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13293 variant_constructs);
13294 for (int i = 0; i < variant_nconstructs; i++)
13296 ++cnt;
13297 if (scores)
13298 codes.safe_push (variant_constructs[i]);
13299 else if (matched < nconstructs
13300 && variant_constructs[i] == constructs[matched])
13302 if (variant_constructs[i] == OMP_SIMD)
13304 if (matched)
13305 return 0;
13306 simd_seen = true;
13308 ++matched;
13312 if (!target_seen
13313 && lookup_attribute ("omp declare target block",
13314 DECL_ATTRIBUTES (current_function_decl)))
13316 if (scores)
13317 codes.safe_push (OMP_TARGET);
13318 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13319 ++matched;
13321 if (scores)
13323 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13325 int j = codes.length () - 1;
13326 for (int i = nconstructs - 1; i >= 0; i--)
13328 while (j >= 0
13329 && (pass != 0 || declare_simd_cnt != j)
13330 && constructs[i] != codes[j])
13331 --j;
13332 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13333 *scores++ = j - 1;
13334 else
13335 *scores++ = j;
13337 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13338 ? codes.length () - 1 : codes.length ());
13340 return declare_simd_cnt == -1 ? 1 : 2;
13342 if (matched == nconstructs)
13343 return simd_seen ? -1 : 1;
13344 return 0;
13347 /* Gimplify OACC_CACHE. */
13349 static void
13350 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13352 tree expr = *expr_p;
13354 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13355 OACC_CACHE);
13356 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13357 OACC_CACHE);
13359 /* TODO: Do something sensible with this information. */
13361 *expr_p = NULL_TREE;
13364 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
13365 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
13366 kind. The entry kind will replace the one in CLAUSE, while the exit
13367 kind will be used in a new omp_clause and returned to the caller. */
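/* For example, following the switch below: a GOMP_MAP_TOFROM clause is
   split into an entry kind of GOMP_MAP_TO, stored back into CLAUSE,
   and an exit kind of GOMP_MAP_FROM, returned in a freshly built
   OMP_CLAUSE_MAP.  A plain GOMP_MAP_TO clause needs no exit
   counterpart, so NULL is returned for it.  */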
13369 static tree
13370 gimplify_oacc_declare_1 (tree clause)
13372 HOST_WIDE_INT kind, new_op;
13373 bool ret = false;
13374 tree c = NULL;
13376 kind = OMP_CLAUSE_MAP_KIND (clause);
13378 switch (kind)
13380 case GOMP_MAP_ALLOC:
13381 new_op = GOMP_MAP_RELEASE;
13382 ret = true;
13383 break;
13385 case GOMP_MAP_FROM:
13386 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13387 new_op = GOMP_MAP_FROM;
13388 ret = true;
13389 break;
13391 case GOMP_MAP_TOFROM:
13392 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13393 new_op = GOMP_MAP_FROM;
13394 ret = true;
13395 break;
13397 case GOMP_MAP_DEVICE_RESIDENT:
13398 case GOMP_MAP_FORCE_DEVICEPTR:
13399 case GOMP_MAP_FORCE_PRESENT:
13400 case GOMP_MAP_LINK:
13401 case GOMP_MAP_POINTER:
13402 case GOMP_MAP_TO:
13403 break;
13405 default:
13406 gcc_unreachable ();
13407 break;
13410 if (ret)
13412 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13413 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13414 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13417 return c;
13420 /* Gimplify OACC_DECLARE. */
13422 static void
13423 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13425 tree expr = *expr_p;
13426 gomp_target *stmt;
13427 tree clauses, t, decl;
13429 clauses = OACC_DECLARE_CLAUSES (expr);
13431 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13432 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13434 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13436 decl = OMP_CLAUSE_DECL (t);
13438 if (TREE_CODE (decl) == MEM_REF)
13439 decl = TREE_OPERAND (decl, 0);
13441 if (VAR_P (decl) && !is_oacc_declared (decl))
13443 tree attr = get_identifier ("oacc declare target");
13444 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13445 DECL_ATTRIBUTES (decl));
13448 if (VAR_P (decl)
13449 && !is_global_var (decl)
13450 && DECL_CONTEXT (decl) == current_function_decl)
13452 tree c = gimplify_oacc_declare_1 (t);
13453 if (c)
13455 if (oacc_declare_returns == NULL)
13456 oacc_declare_returns = new hash_map<tree, tree>;
13458 oacc_declare_returns->put (decl, c);
13462 if (gimplify_omp_ctxp)
13463 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13466 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13467 clauses);
13469 gimplify_seq_add_stmt (pre_p, stmt);
13471 *expr_p = NULL_TREE;
13474 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13475 gimplification of the body, as well as scanning the body for used
13476 variables. We need to do this scan now, because variable-sized
13477 decls will be decomposed during gimplification. */
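/* Rough sketch of the lowering (illustrative only):

     #pragma omp parallel num_threads (4)
       body;

   becomes a GIMPLE_OMP_PARALLEL statement carrying the adjusted
   clauses, whose body is the gimplified "body" (typically a
   GIMPLE_BIND); for combined constructs the GF_OMP_PARALLEL_COMBINED
   subcode is set on it.  */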
13479 static void
13480 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13482 tree expr = *expr_p;
13483 gimple *g;
13484 gimple_seq body = NULL;
13486 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13487 OMP_PARALLEL_COMBINED (expr)
13488 ? ORT_COMBINED_PARALLEL
13489 : ORT_PARALLEL, OMP_PARALLEL);
13491 push_gimplify_context ();
13493 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13494 if (gimple_code (g) == GIMPLE_BIND)
13495 pop_gimplify_context (g);
13496 else
13497 pop_gimplify_context (NULL);
13499 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13500 OMP_PARALLEL);
13502 g = gimple_build_omp_parallel (body,
13503 OMP_PARALLEL_CLAUSES (expr),
13504 NULL_TREE, NULL_TREE);
13505 if (OMP_PARALLEL_COMBINED (expr))
13506 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13507 gimplify_seq_add_stmt (pre_p, g);
13508 *expr_p = NULL_TREE;
13511 /* Gimplify the contents of an OMP_TASK statement. This involves
13512 gimplification of the body, as well as scanning the body for used
13513 variables. We need to do this scan now, because variable-sized
13514 decls will be decomposed during gimplification. */
13516 static void
13517 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13519 tree expr = *expr_p;
13520 gimple *g;
13521 gimple_seq body = NULL;
13522 bool nowait = false;
13523 bool has_depend = false;
13525 if (OMP_TASK_BODY (expr) == NULL_TREE)
13527 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13528 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13530 has_depend = true;
13531 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13533 error_at (OMP_CLAUSE_LOCATION (c),
13534 "%<mutexinoutset%> kind in %<depend%> clause on a "
13535 "%<taskwait%> construct");
13536 break;
13539 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13540 nowait = true;
13541 if (nowait && !has_depend)
13543 error_at (EXPR_LOCATION (expr),
13544 "%<taskwait%> construct with %<nowait%> clause but no "
13545 "%<depend%> clauses");
13546 *expr_p = NULL_TREE;
13547 return;
13551 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13552 omp_find_clause (OMP_TASK_CLAUSES (expr),
13553 OMP_CLAUSE_UNTIED)
13554 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13556 if (OMP_TASK_BODY (expr))
13558 push_gimplify_context ();
13560 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13561 if (gimple_code (g) == GIMPLE_BIND)
13562 pop_gimplify_context (g);
13563 else
13564 pop_gimplify_context (NULL);
13567 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13568 OMP_TASK);
13570 g = gimple_build_omp_task (body,
13571 OMP_TASK_CLAUSES (expr),
13572 NULL_TREE, NULL_TREE,
13573 NULL_TREE, NULL_TREE, NULL_TREE);
13574 if (OMP_TASK_BODY (expr) == NULL_TREE)
13575 gimple_omp_task_set_taskwait_p (g, true);
13576 gimplify_seq_add_stmt (pre_p, g);
13577 *expr_p = NULL_TREE;
13580 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13581 force it into a temporary initialized in PRE_P and add firstprivate clause
13582 to ORIG_FOR_STMT. */
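/* Hedged example: in

     #pragma omp taskloop
     for (i = 0; i < f (); i++)

   the bound f () is not a gimple constant, so it is evaluated into a
   temporary (say D.1234, an illustrative name) in PRE_P, *TP is
   replaced by that temporary, and firstprivate (D.1234) is appended to
   the clauses of ORIG_FOR_STMT.  */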
13584 static void
13585 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13586 tree orig_for_stmt)
13588 if (*tp == NULL || is_gimple_constant (*tp))
13589 return;
13591 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13592 /* A reference-to-pointer conversion is considered useless,
13593 but it is significant for the firstprivate clause. Force
13594 it here. */
13595 if (type
13596 && TREE_CODE (type) == POINTER_TYPE
13597 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13599 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13600 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13601 gimplify_and_add (m, pre_p);
13602 *tp = v;
13605 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13606 OMP_CLAUSE_DECL (c) = *tp;
13607 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13608 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13611 /* Helper function of gimplify_omp_for: find an OMP_ORDERED with
13612 a null OMP_ORDERED_BODY inside OMP_FOR's body. */
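/* A stand-alone ordered construct here means a doacross use such as

     #pragma omp ordered depend (sink: i - 1)

   which has a null OMP_ORDERED_BODY, as opposed to "#pragma omp
   ordered" with an attached block; nested simd, parallel and target
   regions are deliberately not walked into.  */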
13614 static tree
13615 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13617 switch (TREE_CODE (*tp))
13619 case OMP_ORDERED:
13620 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13621 return *tp;
13622 break;
13623 case OMP_SIMD:
13624 case OMP_PARALLEL:
13625 case OMP_TARGET:
13626 *walk_subtrees = 0;
13627 break;
13628 default:
13629 break;
13631 return NULL_TREE;
13634 /* Gimplify the gross structure of an OMP_FOR statement. */
13636 static enum gimplify_status
13637 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13639 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13640 enum gimplify_status ret = GS_ALL_DONE;
13641 enum gimplify_status tret;
13642 gomp_for *gfor;
13643 gimple_seq for_body, for_pre_body;
13644 int i;
13645 bitmap has_decl_expr = NULL;
13646 enum omp_region_type ort = ORT_WORKSHARE;
13647 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13649 orig_for_stmt = for_stmt = *expr_p;
13651 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13652 != NULL_TREE);
13653 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13655 tree *data[4] = { NULL, NULL, NULL, NULL };
13656 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13657 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13658 find_combined_omp_for, data, NULL);
13659 if (inner_for_stmt == NULL_TREE)
13661 gcc_assert (seen_error ());
13662 *expr_p = NULL_TREE;
13663 return GS_ERROR;
13665 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13667 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13668 &OMP_FOR_PRE_BODY (for_stmt));
13669 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13671 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13673 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13674 &OMP_FOR_PRE_BODY (for_stmt));
13675 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13678 if (data[0])
13680 /* We have some statements or variable declarations in between
13681 the composite construct directives. Move them around the
13682 inner_for_stmt. */
13683 data[0] = expr_p;
13684 for (i = 0; i < 3; i++)
13685 if (data[i])
13687 tree t = *data[i];
13688 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13689 data[i + 1] = data[i];
13690 *data[i] = OMP_BODY (t);
13691 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13692 NULL_TREE, make_node (BLOCK));
13693 OMP_BODY (t) = body;
13694 append_to_statement_list_force (inner_for_stmt,
13695 &BIND_EXPR_BODY (body));
13696 *data[3] = t;
13697 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13698 gcc_assert (*data[3] == inner_for_stmt);
13700 return GS_OK;
13703 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13704 if (!loop_p
13705 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13706 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13707 i)) == TREE_LIST
13708 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13709 i)))
13711 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13712 /* Class iterators aren't allowed on OMP_SIMD, so the only
13713 case we need to solve is distribute parallel for. They are
13714 allowed on the loop construct, but that is already handled
13715 in gimplify_omp_loop. */
13716 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13717 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13718 && data[1]);
13719 tree orig_decl = TREE_PURPOSE (orig);
13720 tree last = TREE_VALUE (orig);
13721 tree *pc;
13722 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13723 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13724 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13725 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13726 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13727 break;
13728 if (*pc == NULL_TREE)
13730 tree *spc;
13731 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13732 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13733 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13734 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13735 break;
13736 if (*spc)
13738 tree c = *spc;
13739 *spc = OMP_CLAUSE_CHAIN (c);
13740 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13741 *pc = c;
13744 if (*pc == NULL_TREE)
13746 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13748 /* A private clause will appear only on inner_for_stmt.
13749 Change it into firstprivate, and add a private clause
13750 on for_stmt. */
13751 tree c = copy_node (*pc);
13752 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13753 OMP_FOR_CLAUSES (for_stmt) = c;
13754 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13755 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13757 else
13759 /* A lastprivate clause will appear on both inner_for_stmt
13760 and for_stmt. Add a firstprivate clause to
13761 inner_for_stmt. */
13762 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13763 OMP_CLAUSE_FIRSTPRIVATE);
13764 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13765 OMP_CLAUSE_CHAIN (c) = *pc;
13766 *pc = c;
13767 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13769 tree c = build_omp_clause (UNKNOWN_LOCATION,
13770 OMP_CLAUSE_FIRSTPRIVATE);
13771 OMP_CLAUSE_DECL (c) = last;
13772 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13773 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13774 c = build_omp_clause (UNKNOWN_LOCATION,
13775 *pc ? OMP_CLAUSE_SHARED
13776 : OMP_CLAUSE_FIRSTPRIVATE);
13777 OMP_CLAUSE_DECL (c) = orig_decl;
13778 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13779 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13781 /* Similarly, take care of C++ range-for temporaries; those should
13782 be firstprivate on OMP_PARALLEL if any. */
13783 if (data[1])
13784 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13785 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13786 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13787 i)) == TREE_LIST
13788 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13789 i)))
13791 tree orig
13792 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13793 tree v = TREE_CHAIN (orig);
13794 tree c = build_omp_clause (UNKNOWN_LOCATION,
13795 OMP_CLAUSE_FIRSTPRIVATE);
13796 /* First add a firstprivate clause for the __for_end artificial
13797 decl. */
13798 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13799 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13800 == REFERENCE_TYPE)
13801 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13802 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13803 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13804 if (TREE_VEC_ELT (v, 0))
13806 /* And now the same for __for_range artificial decl if it
13807 exists. */
13808 c = build_omp_clause (UNKNOWN_LOCATION,
13809 OMP_CLAUSE_FIRSTPRIVATE);
13810 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13811 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13812 == REFERENCE_TYPE)
13813 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13814 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13815 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13820 switch (TREE_CODE (for_stmt))
13822 case OMP_FOR:
13823 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13825 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13826 OMP_CLAUSE_SCHEDULE))
13827 error_at (EXPR_LOCATION (for_stmt),
13828 "%qs clause may not appear on non-rectangular %qs",
13829 "schedule", lang_GNU_Fortran () ? "do" : "for");
13830 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13831 error_at (EXPR_LOCATION (for_stmt),
13832 "%qs clause may not appear on non-rectangular %qs",
13833 "ordered", lang_GNU_Fortran () ? "do" : "for");
13835 break;
13836 case OMP_DISTRIBUTE:
13837 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13838 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13839 OMP_CLAUSE_DIST_SCHEDULE))
13840 error_at (EXPR_LOCATION (for_stmt),
13841 "%qs clause may not appear on non-rectangular %qs",
13842 "dist_schedule", "distribute");
13843 break;
13844 case OACC_LOOP:
13845 ort = ORT_ACC;
13846 break;
13847 case OMP_TASKLOOP:
13848 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13850 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13851 OMP_CLAUSE_GRAINSIZE))
13852 error_at (EXPR_LOCATION (for_stmt),
13853 "%qs clause may not appear on non-rectangular %qs",
13854 "grainsize", "taskloop");
13855 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13856 OMP_CLAUSE_NUM_TASKS))
13857 error_at (EXPR_LOCATION (for_stmt),
13858 "%qs clause may not appear on non-rectangular %qs",
13859 "num_tasks", "taskloop");
13861 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13862 ort = ORT_UNTIED_TASKLOOP;
13863 else
13864 ort = ORT_TASKLOOP;
13865 break;
13866 case OMP_SIMD:
13867 ort = ORT_SIMD;
13868 break;
13869 default:
13870 gcc_unreachable ();
13873 /* Set the OMP_CLAUSE_LINEAR_NO_COPYIN flag on an explicit linear
13874 clause for the IV. */
13875 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13877 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13878 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13879 decl = TREE_OPERAND (t, 0);
13880 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13881 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13882 && OMP_CLAUSE_DECL (c) == decl)
13884 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13885 break;
13889 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13890 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13891 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13892 ? OMP_LOOP : TREE_CODE (for_stmt));
13894 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13895 gimplify_omp_ctxp->distribute = true;
13897 /* Handle OMP_FOR_INIT. */
13898 for_pre_body = NULL;
13899 if ((ort == ORT_SIMD
13900 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13901 && OMP_FOR_PRE_BODY (for_stmt))
13903 has_decl_expr = BITMAP_ALLOC (NULL);
13904 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13905 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
13907 t = OMP_FOR_PRE_BODY (for_stmt);
13908 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13910 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13912 tree_stmt_iterator si;
13913 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13914 tsi_next (&si))
13916 t = tsi_stmt (si);
13917 if (TREE_CODE (t) == DECL_EXPR
13918 && VAR_P (DECL_EXPR_DECL (t)))
13919 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13923 if (OMP_FOR_PRE_BODY (for_stmt))
13925 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
13926 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13927 else
13929 struct gimplify_omp_ctx ctx;
13930 memset (&ctx, 0, sizeof (ctx));
13931 ctx.region_type = ORT_NONE;
13932 gimplify_omp_ctxp = &ctx;
13933 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13934 gimplify_omp_ctxp = NULL;
13937 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
13939 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13940 for_stmt = inner_for_stmt;
13942 /* For taskloop, we need to gimplify the start, end and step expressions
13943 before the taskloop, outside of the taskloop omp context. */
13944 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13946 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13948 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13949 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
13950 ? pre_p : &for_pre_body);
13951 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
13952 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13954 tree v = TREE_OPERAND (t, 1);
13955 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13956 for_pre_p, orig_for_stmt);
13957 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13958 for_pre_p, orig_for_stmt);
13960 else
13961 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13962 orig_for_stmt);
13964 /* Handle OMP_FOR_COND. */
13965 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13966 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13968 tree v = TREE_OPERAND (t, 1);
13969 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13970 for_pre_p, orig_for_stmt);
13971 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13972 for_pre_p, orig_for_stmt);
13974 else
13975 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13976 orig_for_stmt);
13978 /* Handle OMP_FOR_INCR. */
13979 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13980 if (TREE_CODE (t) == MODIFY_EXPR)
13982 decl = TREE_OPERAND (t, 0);
13983 t = TREE_OPERAND (t, 1);
13984 tree *tp = &TREE_OPERAND (t, 1);
13985 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
13986 tp = &TREE_OPERAND (t, 0);
13988 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
13989 orig_for_stmt);
13993 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
13994 OMP_TASKLOOP);
13997 if (orig_for_stmt != for_stmt)
13998 gimplify_omp_ctxp->combined_loop = true;
14000 for_body = NULL;
14001 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14002 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
14003 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14004 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
14006 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
14007 bool is_doacross = false;
14008 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
14009 find_standalone_omp_ordered, NULL))
14011 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
14012 is_doacross = true;
14013 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
14014 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
14015 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14016 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
14018 error_at (OMP_CLAUSE_LOCATION (*pc),
14019 "%<linear%> clause may not be specified together "
14020 "with %<ordered%> clause if stand-alone %<ordered%> "
14021 "construct is nested in it");
14022 *pc = OMP_CLAUSE_CHAIN (*pc);
14024 else
14025 pc = &OMP_CLAUSE_CHAIN (*pc);
14027 int collapse = 1, tile = 0;
14028 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
14029 if (c)
14030 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
14031 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
14032 if (c)
14033 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
14034 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
14035 hash_set<tree> *allocate_uids = NULL;
14036 if (c)
14038 allocate_uids = new hash_set<tree>;
14039 for (; c; c = OMP_CLAUSE_CHAIN (c))
14040 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
14041 allocate_uids->add (OMP_CLAUSE_DECL (c));
14043 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14045 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14046 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14047 decl = TREE_OPERAND (t, 0);
14048 gcc_assert (DECL_P (decl));
14049 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
14050 || POINTER_TYPE_P (TREE_TYPE (decl)));
14051 if (is_doacross)
14053 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
14055 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14056 if (TREE_CODE (orig_decl) == TREE_LIST)
14058 orig_decl = TREE_PURPOSE (orig_decl);
14059 if (!orig_decl)
14060 orig_decl = decl;
14062 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
14064 else
14065 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14066 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14069 if (for_stmt == orig_for_stmt)
14071 tree orig_decl = decl;
14072 if (OMP_FOR_ORIG_DECLS (for_stmt))
14074 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14075 if (TREE_CODE (orig_decl) == TREE_LIST)
14077 orig_decl = TREE_PURPOSE (orig_decl);
14078 if (!orig_decl)
14079 orig_decl = decl;
14082 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
14083 error_at (EXPR_LOCATION (for_stmt),
14084 "threadprivate iteration variable %qD", orig_decl);
14087 /* Make sure the iteration variable is private. */
14088 tree c = NULL_TREE;
14089 tree c2 = NULL_TREE;
14090 if (orig_for_stmt != for_stmt)
14092 /* Preserve this information until we gimplify the inner simd. */
14093 if (has_decl_expr
14094 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14095 TREE_PRIVATE (t) = 1;
14097 else if (ort == ORT_SIMD)
14099 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14100 (splay_tree_key) decl);
14101 omp_is_private (gimplify_omp_ctxp, decl,
14102 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14103 != 1));
14104 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14106 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14107 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14108 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14109 OMP_CLAUSE_LASTPRIVATE);
14110 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14111 OMP_CLAUSE_LASTPRIVATE))
14112 if (OMP_CLAUSE_DECL (c3) == decl)
14114 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14115 "conditional %<lastprivate%> on loop "
14116 "iterator %qD ignored", decl);
14117 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14118 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14121 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14123 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14124 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14125 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14126 if ((has_decl_expr
14127 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14128 || TREE_PRIVATE (t))
14130 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14131 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14133 struct gimplify_omp_ctx *outer
14134 = gimplify_omp_ctxp->outer_context;
14135 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14137 if (outer->region_type == ORT_WORKSHARE
14138 && outer->combined_loop)
14140 n = splay_tree_lookup (outer->variables,
14141 (splay_tree_key)decl);
14142 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14144 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14145 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14147 else
14149 struct gimplify_omp_ctx *octx = outer->outer_context;
14150 if (octx
14151 && octx->region_type == ORT_COMBINED_PARALLEL
14152 && octx->outer_context
14153 && (octx->outer_context->region_type
14154 == ORT_WORKSHARE)
14155 && octx->outer_context->combined_loop)
14157 octx = octx->outer_context;
14158 n = splay_tree_lookup (octx->variables,
14159 (splay_tree_key)decl);
14160 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14162 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14163 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14170 OMP_CLAUSE_DECL (c) = decl;
14171 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14172 OMP_FOR_CLAUSES (for_stmt) = c;
14173 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14174 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14175 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14176 true);
14178 else
14180 bool lastprivate
14181 = (!has_decl_expr
14182 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14183 if (TREE_PRIVATE (t))
14184 lastprivate = false;
14185 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14187 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14188 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14189 lastprivate = false;
14192 struct gimplify_omp_ctx *outer
14193 = gimplify_omp_ctxp->outer_context;
14194 if (outer && lastprivate)
14195 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14196 true);
14198 c = build_omp_clause (input_location,
14199 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14200 : OMP_CLAUSE_PRIVATE);
14201 OMP_CLAUSE_DECL (c) = decl;
14202 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14203 OMP_FOR_CLAUSES (for_stmt) = c;
14204 omp_add_variable (gimplify_omp_ctxp, decl,
14205 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14206 | GOVD_EXPLICIT | GOVD_SEEN);
14207 c = NULL_TREE;
14210 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14212 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14213 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14214 (splay_tree_key) decl);
14215 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14216 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14217 OMP_CLAUSE_LASTPRIVATE);
14218 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14219 OMP_CLAUSE_LASTPRIVATE))
14220 if (OMP_CLAUSE_DECL (c3) == decl)
14222 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14223 "conditional %<lastprivate%> on loop "
14224 "iterator %qD ignored", decl);
14225 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14226 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14229 else
14230 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14232 /* If DECL is not a gimple register, create a temporary variable to act
14233 as an iteration counter. This is valid, since DECL cannot be
14234 modified in the body of the loop. Similarly for any iteration vars
14235 in simd with collapse > 1 where the iterator vars must be
14236 lastprivate. And similarly for vars mentioned in allocate clauses. */
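/* Hedged sketch: if DECL is, say, a global and hence not a gimple
   register, the loop below iterates on a fresh temporary VAR of the
   same type, and the loop body is prefixed with "decl = var;" so that
   user code still observes DECL.  */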
14237 if (orig_for_stmt != for_stmt)
14238 var = decl;
14239 else if (!is_gimple_reg (decl)
14240 || (ort == ORT_SIMD
14241 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14242 || (allocate_uids && allocate_uids->contains (decl)))
14244 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14245 /* Make sure omp_add_variable is not called on it prematurely.
14246 We call it ourselves a few lines later. */
14247 gimplify_omp_ctxp = NULL;
14248 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14249 gimplify_omp_ctxp = ctx;
14250 TREE_OPERAND (t, 0) = var;
14252 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14254 if (ort == ORT_SIMD
14255 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14257 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14258 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14259 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14260 OMP_CLAUSE_DECL (c2) = var;
14261 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14262 OMP_FOR_CLAUSES (for_stmt) = c2;
14263 omp_add_variable (gimplify_omp_ctxp, var,
14264 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14265 if (c == NULL_TREE)
14267 c = c2;
14268 c2 = NULL_TREE;
14271 else
14272 omp_add_variable (gimplify_omp_ctxp, var,
14273 GOVD_PRIVATE | GOVD_SEEN);
14275 else
14276 var = decl;
14278 gimplify_omp_ctxp->in_for_exprs = true;
14279 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14281 tree lb = TREE_OPERAND (t, 1);
14282 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14283 is_gimple_val, fb_rvalue, false);
14284 ret = MIN (ret, tret);
14285 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14286 is_gimple_val, fb_rvalue, false);
14288 else
14289 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14290 is_gimple_val, fb_rvalue, false);
14291 gimplify_omp_ctxp->in_for_exprs = false;
14292 ret = MIN (ret, tret);
14293 if (ret == GS_ERROR)
14294 return ret;
14296 /* Handle OMP_FOR_COND. */
14297 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14298 gcc_assert (COMPARISON_CLASS_P (t));
14299 gcc_assert (TREE_OPERAND (t, 0) == decl);
14301 gimplify_omp_ctxp->in_for_exprs = true;
14302 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14304 tree ub = TREE_OPERAND (t, 1);
14305 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14306 is_gimple_val, fb_rvalue, false);
14307 ret = MIN (ret, tret);
14308 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14309 is_gimple_val, fb_rvalue, false);
14311 else
14312 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14313 is_gimple_val, fb_rvalue, false);
14314 gimplify_omp_ctxp->in_for_exprs = false;
14315 ret = MIN (ret, tret);
14317 /* Handle OMP_FOR_INCR. */
14318 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14319 switch (TREE_CODE (t))
14321 case PREINCREMENT_EXPR:
14322 case POSTINCREMENT_EXPR:
14324 tree decl = TREE_OPERAND (t, 0);
14325 /* c_omp_for_incr_canonicalize_ptr() should have been
14326 called to massage things appropriately. */
14327 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14329 if (orig_for_stmt != for_stmt)
14330 break;
14331 t = build_int_cst (TREE_TYPE (decl), 1);
14332 if (c)
14333 OMP_CLAUSE_LINEAR_STEP (c) = t;
14334 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14335 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14336 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14337 break;
14340 case PREDECREMENT_EXPR:
14341 case POSTDECREMENT_EXPR:
14342 /* c_omp_for_incr_canonicalize_ptr() should have been
14343 called to massage things appropriately. */
14344 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14345 if (orig_for_stmt != for_stmt)
14346 break;
14347 t = build_int_cst (TREE_TYPE (decl), -1);
14348 if (c)
14349 OMP_CLAUSE_LINEAR_STEP (c) = t;
14350 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14351 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14352 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14353 break;
14355 case MODIFY_EXPR:
14356 gcc_assert (TREE_OPERAND (t, 0) == decl);
14357 TREE_OPERAND (t, 0) = var;
14359 t = TREE_OPERAND (t, 1);
14360 switch (TREE_CODE (t))
14362 case PLUS_EXPR:
14363 if (TREE_OPERAND (t, 1) == decl)
14365 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14366 TREE_OPERAND (t, 0) = var;
14367 break;
14370 /* Fallthru. */
14371 case MINUS_EXPR:
14372 case POINTER_PLUS_EXPR:
14373 gcc_assert (TREE_OPERAND (t, 0) == decl);
14374 TREE_OPERAND (t, 0) = var;
14375 break;
14376 default:
14377 gcc_unreachable ();
14380 gimplify_omp_ctxp->in_for_exprs = true;
14381 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14382 is_gimple_val, fb_rvalue, false);
14383 ret = MIN (ret, tret);
14384 if (c)
14386 tree step = TREE_OPERAND (t, 1);
14387 tree stept = TREE_TYPE (decl);
14388 if (POINTER_TYPE_P (stept))
14389 stept = sizetype;
14390 step = fold_convert (stept, step);
14391 if (TREE_CODE (t) == MINUS_EXPR)
14392 step = fold_build1 (NEGATE_EXPR, stept, step);
14393 OMP_CLAUSE_LINEAR_STEP (c) = step;
14394 if (step != TREE_OPERAND (t, 1))
14396 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14397 &for_pre_body, NULL,
14398 is_gimple_val, fb_rvalue, false);
14399 ret = MIN (ret, tret);
14402 gimplify_omp_ctxp->in_for_exprs = false;
14403 break;
14405 default:
14406 gcc_unreachable ();
14409 if (c2)
14411 gcc_assert (c);
14412 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14415 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14417 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14418 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14419 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14420 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14421 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14422 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14423 && OMP_CLAUSE_DECL (c) == decl)
14425 if (is_doacross && (collapse == 1 || i >= collapse))
14426 t = var;
14427 else
14429 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14430 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14431 gcc_assert (TREE_OPERAND (t, 0) == var);
14432 t = TREE_OPERAND (t, 1);
14433 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14434 || TREE_CODE (t) == MINUS_EXPR
14435 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14436 gcc_assert (TREE_OPERAND (t, 0) == var);
14437 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14438 is_doacross ? var : decl,
14439 TREE_OPERAND (t, 1));
14441 gimple_seq *seq;
14442 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14443 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14444 else
14445 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14446 push_gimplify_context ();
14447 gimplify_assign (decl, t, seq);
14448 gimple *bind = NULL;
14449 if (gimplify_ctxp->temps)
14451 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14452 *seq = NULL;
14453 gimplify_seq_add_stmt (seq, bind);
14455 pop_gimplify_context (bind);
14458 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14459 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14461 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14462 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14463 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14464 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14465 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14466 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14467 gcc_assert (COMPARISON_CLASS_P (t));
14468 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14469 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14470 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14474 BITMAP_FREE (has_decl_expr);
14475 delete allocate_uids;
14477 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14478 || (loop_p && orig_for_stmt == for_stmt))
14480 push_gimplify_context ();
14481 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14483 OMP_FOR_BODY (orig_for_stmt)
14484 = build3 (BIND_EXPR, void_type_node, NULL,
14485 OMP_FOR_BODY (orig_for_stmt), NULL);
14486 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14490 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14491 &for_body);
14493 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14494 || (loop_p && orig_for_stmt == for_stmt))
14496 if (gimple_code (g) == GIMPLE_BIND)
14497 pop_gimplify_context (g);
14498 else
14499 pop_gimplify_context (NULL);
14502 if (orig_for_stmt != for_stmt)
14503 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14505 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14506 decl = TREE_OPERAND (t, 0);
14507 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14508 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14509 gimplify_omp_ctxp = ctx->outer_context;
14510 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14511 gimplify_omp_ctxp = ctx;
14512 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14513 TREE_OPERAND (t, 0) = var;
14514 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14515 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14516 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14517 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14518 for (int j = i + 1;
14519 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14521 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14522 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14523 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14524 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14526 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14527 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14529 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14530 gcc_assert (COMPARISON_CLASS_P (t));
14531 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14532 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14534 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14535 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14540 gimplify_adjust_omp_clauses (pre_p, for_body,
14541 &OMP_FOR_CLAUSES (orig_for_stmt),
14542 TREE_CODE (orig_for_stmt));
14544 int kind;
14545 switch (TREE_CODE (orig_for_stmt))
14547 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14548 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14549 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14550 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14551 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14552 default:
14553 gcc_unreachable ();
14555 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14557 gimplify_seq_add_seq (pre_p, for_pre_body);
14558 for_pre_body = NULL;
14560 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14561 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14562 for_pre_body);
14563 if (orig_for_stmt != for_stmt)
14564 gimple_omp_for_set_combined_p (gfor, true);
14565 if (gimplify_omp_ctxp
14566 && (gimplify_omp_ctxp->combined_loop
14567 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14568 && gimplify_omp_ctxp->outer_context
14569 && gimplify_omp_ctxp->outer_context->combined_loop)))
14571 gimple_omp_for_set_combined_into_p (gfor, true);
14572 if (gimplify_omp_ctxp->combined_loop)
14573 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14574 else
14575 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14578 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14580 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14581 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14582 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14583 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14584 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14585 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14586 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14587 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14590 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14591 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
14592 The outer taskloop computes the number of iterations and the counts
14593 for collapsed loops, and holds the taskloop-specific clauses. The
14594 task construct stands for the effect of data sharing on the
14595 explicit task it creates, and the inner taskloop stands for the
14596 expansion of the static loop inside of the explicit task construct. */
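/* Schematically (illustrative nesting only):

     GIMPLE_OMP_FOR (taskloop, outer: grainsize/num_tasks/collapse)
       GIMPLE_BIND
         GIMPLE_OMP_TASK (taskloop_p: shared/firstprivate/...)
           GIMPLE_BIND
             GIMPLE_OMP_FOR (taskloop, inner: lastprivate/collapse/...)
               body  */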
14597 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14599 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14600 tree task_clauses = NULL_TREE;
14601 tree c = *gfor_clauses_ptr;
14602 tree *gtask_clauses_ptr = &task_clauses;
14603 tree outer_for_clauses = NULL_TREE;
14604 tree *gforo_clauses_ptr = &outer_for_clauses;
14605 bitmap lastprivate_uids = NULL;
14606 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14608 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14609 if (c)
14611 lastprivate_uids = BITMAP_ALLOC (NULL);
14612 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14613 OMP_CLAUSE_LASTPRIVATE))
14614 bitmap_set_bit (lastprivate_uids,
14615 DECL_UID (OMP_CLAUSE_DECL (c)));
14617 c = *gfor_clauses_ptr;
14619 for (; c; c = OMP_CLAUSE_CHAIN (c))
14620 switch (OMP_CLAUSE_CODE (c))
14622 /* These clauses are allowed on the task construct; move them there. */
14623 case OMP_CLAUSE_SHARED:
14624 case OMP_CLAUSE_FIRSTPRIVATE:
14625 case OMP_CLAUSE_DEFAULT:
14626 case OMP_CLAUSE_IF:
14627 case OMP_CLAUSE_UNTIED:
14628 case OMP_CLAUSE_FINAL:
14629 case OMP_CLAUSE_MERGEABLE:
14630 case OMP_CLAUSE_PRIORITY:
14631 case OMP_CLAUSE_REDUCTION:
14632 case OMP_CLAUSE_IN_REDUCTION:
14633 *gtask_clauses_ptr = c;
14634 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14635 break;
14636 case OMP_CLAUSE_PRIVATE:
14637 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14639 /* We want private on outer for and firstprivate
14640 on task. */
14641 *gtask_clauses_ptr
14642 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14643 OMP_CLAUSE_FIRSTPRIVATE);
14644 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14645 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14646 openacc);
14647 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14648 *gforo_clauses_ptr = c;
14649 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14651 else
14653 *gtask_clauses_ptr = c;
14654 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14656 break;
14657 /* These clauses go into outer taskloop clauses. */
14658 case OMP_CLAUSE_GRAINSIZE:
14659 case OMP_CLAUSE_NUM_TASKS:
14660 case OMP_CLAUSE_NOGROUP:
14661 *gforo_clauses_ptr = c;
14662 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14663 break;
14664 /* The collapse clause we duplicate on both taskloops. */
14665 case OMP_CLAUSE_COLLAPSE:
14666 *gfor_clauses_ptr = c;
14667 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14668 *gforo_clauses_ptr = copy_node (c);
14669 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14670 break;
14671 /* For lastprivate, keep the clause on the inner taskloop, and add
14672 a shared clause on the task. If the same decl is also firstprivate,
14673 also add a firstprivate clause on the inner taskloop. */
14674 case OMP_CLAUSE_LASTPRIVATE:
14675 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14677 /* For taskloop C++ lastprivate IVs, we want:
14678 1) private on outer taskloop
14679 2) firstprivate and shared on task
14680 3) lastprivate on inner taskloop */
14681 *gtask_clauses_ptr
14682 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14683 OMP_CLAUSE_FIRSTPRIVATE);
14684 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14685 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14686 openacc);
14687 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14688 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14689 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14690 OMP_CLAUSE_PRIVATE);
14691 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14692 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14693 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14694 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14696 *gfor_clauses_ptr = c;
14697 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14698 *gtask_clauses_ptr
14699 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14700 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14701 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14702 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14703 gtask_clauses_ptr
14704 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14705 break;
14706 /* The allocate clause we duplicate on the task and the inner taskloop
14707 if the decl is lastprivate; otherwise we just put it on the task. */
14708 case OMP_CLAUSE_ALLOCATE:
14709 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14710 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14712 /* Additionally, put firstprivate clause on task
14713 for the allocator if it is not constant. */
14714 *gtask_clauses_ptr
14715 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14716 OMP_CLAUSE_FIRSTPRIVATE);
14717 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14718 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14719 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14721 if (lastprivate_uids
14722 && bitmap_bit_p (lastprivate_uids,
14723 DECL_UID (OMP_CLAUSE_DECL (c))))
14725 *gfor_clauses_ptr = c;
14726 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14727 *gtask_clauses_ptr = copy_node (c);
14728 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14730 else
14732 *gtask_clauses_ptr = c;
14733 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14735 break;
14736 default:
14737 gcc_unreachable ();
14739 *gfor_clauses_ptr = NULL_TREE;
14740 *gtask_clauses_ptr = NULL_TREE;
14741 *gforo_clauses_ptr = NULL_TREE;
14742 BITMAP_FREE (lastprivate_uids);
14743 gimple_set_location (gfor, input_location);
14744 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14745 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14746 NULL_TREE, NULL_TREE, NULL_TREE);
14747 gimple_set_location (g, input_location);
14748 gimple_omp_task_set_taskloop_p (g, true);
14749 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14750 gomp_for *gforo
14751 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14752 gimple_omp_for_collapse (gfor),
14753 gimple_omp_for_pre_body (gfor));
14754 gimple_omp_for_set_pre_body (gfor, NULL);
14755 gimple_omp_for_set_combined_p (gforo, true);
14756 gimple_omp_for_set_combined_into_p (gfor, true);
14757 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14759 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14760 tree v = create_tmp_var (type);
14761 gimple_omp_for_set_index (gforo, i, v);
14762 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14763 gimple_omp_for_set_initial (gforo, i, t);
14764 gimple_omp_for_set_cond (gforo, i,
14765 gimple_omp_for_cond (gfor, i));
14766 t = unshare_expr (gimple_omp_for_final (gfor, i));
14767 gimple_omp_for_set_final (gforo, i, t);
14768 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14769 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14770 TREE_OPERAND (t, 0) = v;
14771 gimple_omp_for_set_incr (gforo, i, t);
14772 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14773 OMP_CLAUSE_DECL (t) = v;
14774 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14775 gimple_omp_for_set_clauses (gforo, t);
14776 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14778 tree *p1 = NULL, *p2 = NULL;
14779 t = gimple_omp_for_initial (gforo, i);
14780 if (TREE_CODE (t) == TREE_VEC)
14781 p1 = &TREE_VEC_ELT (t, 0);
14782 t = gimple_omp_for_final (gforo, i);
14783 if (TREE_CODE (t) == TREE_VEC)
14785 if (p1)
14786 p2 = &TREE_VEC_ELT (t, 0);
14787 else
14788 p1 = &TREE_VEC_ELT (t, 0);
14790 if (p1)
14792 int j;
14793 for (j = 0; j < i; j++)
14794 if (*p1 == gimple_omp_for_index (gfor, j))
14796 *p1 = gimple_omp_for_index (gforo, j);
14797 if (p2)
14798 *p2 = *p1;
14799 break;
14801 gcc_assert (j < i);
14805 gimplify_seq_add_stmt (pre_p, gforo);
14807 else
14808 gimplify_seq_add_stmt (pre_p, gfor);
14810 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14812 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14813 unsigned lastprivate_conditional = 0;
14814 while (ctx
14815 && (ctx->region_type == ORT_TARGET_DATA
14816 || ctx->region_type == ORT_TASKGROUP))
14817 ctx = ctx->outer_context;
14818 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14819 for (tree c = gimple_omp_for_clauses (gfor);
14820 c; c = OMP_CLAUSE_CHAIN (c))
14821 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14822 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14823 ++lastprivate_conditional;
14824 if (lastprivate_conditional)
14826 struct omp_for_data fd;
14827 omp_extract_for_data (gfor, &fd, NULL);
14828 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14829 lastprivate_conditional);
14830 tree var = create_tmp_var_raw (type);
14831 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14832 OMP_CLAUSE_DECL (c) = var;
14833 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14834 gimple_omp_for_set_clauses (gfor, c);
14835 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14838 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14840 unsigned lastprivate_conditional = 0;
14841 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14842 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14843 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14844 ++lastprivate_conditional;
14845 if (lastprivate_conditional)
14847 struct omp_for_data fd;
14848 omp_extract_for_data (gfor, &fd, NULL);
14849 tree type = unsigned_type_for (fd.iter_type);
14850 while (lastprivate_conditional--)
14852 tree c = build_omp_clause (UNKNOWN_LOCATION,
14853 OMP_CLAUSE__CONDTEMP_);
14854 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14855 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14856 gimple_omp_for_set_clauses (gfor, c);
14861 if (ret != GS_ALL_DONE)
14862 return GS_ERROR;
14863 *expr_p = NULL_TREE;
14864 return GS_ALL_DONE;
14867 /* Helper for gimplify_omp_loop, called through walk_tree. */
14869 static tree
14870 note_no_context_vars (tree *tp, int *, void *data)
14872 if (VAR_P (*tp)
14873 && DECL_CONTEXT (*tp) == NULL_TREE
14874 && !is_global_var (*tp))
14876 vec<tree> *d = (vec<tree> *) data;
14877 d->safe_push (*tp);
14878 DECL_CONTEXT (*tp) = current_function_decl;
14880 return NULL_TREE;
14883 /* Gimplify the gross structure of an OMP_LOOP statement. */
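/* A rough sketch of the rewriting performed here: the loop construct is
   first turned into OMP_SIMD, and then, depending on the effective bind
   kind, wrapped in further constructs:
       bind(thread)    ->  simd
       bind(parallel)  ->  for simd
       bind(teams)     ->  distribute parallel for simd
   with the clauses distributed among the generated constructs.  This is
   illustrative only; the details are in the per-pass loop below. */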
14885 static enum gimplify_status
14886 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14888 tree for_stmt = *expr_p;
14889 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14890 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14891 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14892 int i;
14894 /* If order is not present, the behavior is as if order(concurrent)
14895 appeared. */
14896 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14897 if (order == NULL_TREE)
14899 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14900 OMP_CLAUSE_CHAIN (order) = clauses;
14901 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14904 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14905 if (bind == NULL_TREE)
14907 if (!flag_openmp) /* flag_openmp_simd */
14909 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14910 kind = OMP_CLAUSE_BIND_TEAMS;
14911 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14912 kind = OMP_CLAUSE_BIND_PARALLEL;
14913 else
14915 for (; octx; octx = octx->outer_context)
14917 if ((octx->region_type & ORT_ACC) != 0
14918 || octx->region_type == ORT_NONE
14919 || octx->region_type == ORT_IMPLICIT_TARGET)
14920 continue;
14921 break;
14923 if (octx == NULL && !in_omp_construct)
14924 error_at (EXPR_LOCATION (for_stmt),
14925 "%<bind%> clause not specified on a %<loop%> "
14926 "construct not nested inside another OpenMP construct");
14928 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
14929 OMP_CLAUSE_CHAIN (bind) = clauses;
14930 OMP_CLAUSE_BIND_KIND (bind) = kind;
14931 OMP_FOR_CLAUSES (for_stmt) = bind;
14933 else
14934 switch (OMP_CLAUSE_BIND_KIND (bind))
14936 case OMP_CLAUSE_BIND_THREAD:
14937 break;
14938 case OMP_CLAUSE_BIND_PARALLEL:
14939 if (!flag_openmp) /* flag_openmp_simd */
14941 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14942 break;
14944 for (; octx; octx = octx->outer_context)
14945 if (octx->region_type == ORT_SIMD
14946 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
14948 error_at (EXPR_LOCATION (for_stmt),
14949 "%<bind(parallel)%> on a %<loop%> construct nested "
14950 "inside %<simd%> construct");
14951 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14952 break;
14954 kind = OMP_CLAUSE_BIND_PARALLEL;
14955 break;
14956 case OMP_CLAUSE_BIND_TEAMS:
14957 if (!flag_openmp) /* flag_openmp_simd */
14959 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14960 break;
14962 if ((octx
14963 && octx->region_type != ORT_IMPLICIT_TARGET
14964 && octx->region_type != ORT_NONE
14965 && (octx->region_type & ORT_TEAMS) == 0)
14966 || in_omp_construct)
14968 error_at (EXPR_LOCATION (for_stmt),
14969 "%<bind(teams)%> on a %<loop%> region not strictly "
14970 "nested inside of a %<teams%> region");
14971 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14972 break;
14974 kind = OMP_CLAUSE_BIND_TEAMS;
14975 break;
14976 default:
14977 gcc_unreachable ();
14980 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14981 switch (OMP_CLAUSE_CODE (*pc))
14983 case OMP_CLAUSE_REDUCTION:
14984 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
14986 error_at (OMP_CLAUSE_LOCATION (*pc),
14987 "%<inscan%> %<reduction%> clause on "
14988 "%qs construct", "loop");
14989 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
14991 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
14993 error_at (OMP_CLAUSE_LOCATION (*pc),
14994 "invalid %<task%> reduction modifier on construct "
14995 "other than %<parallel%>, %qs or %<sections%>",
14996 lang_GNU_Fortran () ? "do" : "for");
14997 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
14999 pc = &OMP_CLAUSE_CHAIN (*pc);
15000 break;
15001 case OMP_CLAUSE_LASTPRIVATE:
15002 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15004 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15005 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15006 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
15007 break;
15008 if (OMP_FOR_ORIG_DECLS (for_stmt)
15009 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15010 i)) == TREE_LIST
15011 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
15012 i)))
15014 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15015 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
15016 break;
15019 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
15021 error_at (OMP_CLAUSE_LOCATION (*pc),
15022 "%<lastprivate%> clause on a %<loop%> construct refers "
15023 "to a variable %qD which is not the loop iterator",
15024 OMP_CLAUSE_DECL (*pc));
15025 *pc = OMP_CLAUSE_CHAIN (*pc);
15026 break;
15028 pc = &OMP_CLAUSE_CHAIN (*pc);
15029 break;
15030 default:
15031 pc = &OMP_CLAUSE_CHAIN (*pc);
15032 break;
15035 TREE_SET_CODE (for_stmt, OMP_SIMD);
15037 int last;
15038 switch (kind)
15040 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
15041 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
15042 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
15044 for (int pass = 1; pass <= last; pass++)
15046 if (pass == 2)
15048 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
15049 make_node (BLOCK));
15050 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
15051 *expr_p = make_node (OMP_PARALLEL);
15052 TREE_TYPE (*expr_p) = void_type_node;
15053 OMP_PARALLEL_BODY (*expr_p) = bind;
15054 OMP_PARALLEL_COMBINED (*expr_p) = 1;
15055 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
15056 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
15057 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15058 if (OMP_FOR_ORIG_DECLS (for_stmt)
15059 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
15060 == TREE_LIST))
15062 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15063 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
15065 *pc = build_omp_clause (UNKNOWN_LOCATION,
15066 OMP_CLAUSE_FIRSTPRIVATE);
15067 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
15068 pc = &OMP_CLAUSE_CHAIN (*pc);
15072 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
15073 tree *pc = &OMP_FOR_CLAUSES (t);
15074 TREE_TYPE (t) = void_type_node;
15075 OMP_FOR_BODY (t) = *expr_p;
15076 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
15077 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15078 switch (OMP_CLAUSE_CODE (c))
15080 case OMP_CLAUSE_BIND:
15081 case OMP_CLAUSE_ORDER:
15082 case OMP_CLAUSE_COLLAPSE:
15083 *pc = copy_node (c);
15084 pc = &OMP_CLAUSE_CHAIN (*pc);
15085 break;
15086 case OMP_CLAUSE_PRIVATE:
15087 case OMP_CLAUSE_FIRSTPRIVATE:
15088 /* Only needed on innermost. */
15089 break;
15090 case OMP_CLAUSE_LASTPRIVATE:
15091 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15093 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15094 OMP_CLAUSE_FIRSTPRIVATE);
15095 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15096 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15097 pc = &OMP_CLAUSE_CHAIN (*pc);
15099 *pc = copy_node (c);
15100 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15101 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15102 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15104 if (pass != last)
15105 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15106 else
15107 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15108 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15110 pc = &OMP_CLAUSE_CHAIN (*pc);
15111 break;
15112 case OMP_CLAUSE_REDUCTION:
15113 *pc = copy_node (c);
15114 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15115 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15116 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15118 auto_vec<tree> no_context_vars;
15119 int walk_subtrees = 0;
15120 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15121 &walk_subtrees, &no_context_vars);
15122 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15123 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15124 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15125 note_no_context_vars,
15126 &no_context_vars);
15127 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15128 note_no_context_vars,
15129 &no_context_vars);
15131 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15132 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15133 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15134 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15135 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15137 hash_map<tree, tree> decl_map;
15138 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15139 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15140 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15141 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15142 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15143 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15145 copy_body_data id;
15146 memset (&id, 0, sizeof (id));
15147 id.src_fn = current_function_decl;
15148 id.dst_fn = current_function_decl;
15149 id.src_cfun = cfun;
15150 id.decl_map = &decl_map;
15151 id.copy_decl = copy_decl_no_change;
15152 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15153 id.transform_new_cfg = true;
15154 id.transform_return_to_modify = false;
15155 id.eh_lp_nr = 0;
15156 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15157 &id, NULL);
15158 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15159 &id, NULL);
15161 for (tree d : no_context_vars)
15163 DECL_CONTEXT (d) = NULL_TREE;
15164 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15167 else
15169 OMP_CLAUSE_REDUCTION_INIT (*pc)
15170 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15171 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15172 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15174 pc = &OMP_CLAUSE_CHAIN (*pc);
15175 break;
15176 default:
15177 gcc_unreachable ();
15179 *pc = NULL_TREE;
15180 *expr_p = t;
15182 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
15186 /* Helper function of optimize_target_teams; find OMP_TEAMS inside
15187 OMP_TARGET's body. */
15189 static tree
15190 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15192 *walk_subtrees = 0;
15193 switch (TREE_CODE (*tp))
15195 case OMP_TEAMS:
15196 return *tp;
15197 case BIND_EXPR:
15198 case STATEMENT_LIST:
15199 *walk_subtrees = 1;
15200 break;
15201 default:
15202 break;
15204 return NULL_TREE;
15207 /* Helper function of optimize_target_teams; determine if the expression
15208 can be computed safely on the host before the target construct. */
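/* For example (a sketch): with a firstprivate integral decl n, an
   expression like n * 2 + 1 can be evaluated on the host before
   entering the target region, whereas foo (n), *p, or a volatile
   read cannot. */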
15210 static tree
15211 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15213 splay_tree_node n;
15215 if (TYPE_P (*tp))
15217 *walk_subtrees = 0;
15218 return NULL_TREE;
15220 switch (TREE_CODE (*tp))
15222 case VAR_DECL:
15223 case PARM_DECL:
15224 case RESULT_DECL:
15225 *walk_subtrees = 0;
15226 if (error_operand_p (*tp)
15227 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15228 || DECL_HAS_VALUE_EXPR_P (*tp)
15229 || DECL_THREAD_LOCAL_P (*tp)
15230 || TREE_SIDE_EFFECTS (*tp)
15231 || TREE_THIS_VOLATILE (*tp))
15232 return *tp;
15233 if (is_global_var (*tp)
15234 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15235 || lookup_attribute ("omp declare target link",
15236 DECL_ATTRIBUTES (*tp))))
15237 return *tp;
15238 if (VAR_P (*tp)
15239 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15240 && !is_global_var (*tp)
15241 && decl_function_context (*tp) == current_function_decl)
15242 return *tp;
15243 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15244 (splay_tree_key) *tp);
15245 if (n == NULL)
15247 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15248 return NULL_TREE;
15249 return *tp;
15251 else if (n->value & GOVD_LOCAL)
15252 return *tp;
15253 else if (n->value & GOVD_FIRSTPRIVATE)
15254 return NULL_TREE;
15255 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15256 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15257 return NULL_TREE;
15258 return *tp;
15259 case INTEGER_CST:
15260 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15261 return *tp;
15262 return NULL_TREE;
15263 case TARGET_EXPR:
15264 if (TARGET_EXPR_INITIAL (*tp)
15265 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15266 return *tp;
15267 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15268 walk_subtrees, NULL);
15269 /* Allow some reasonable subset of integral arithmetic. */
15270 case PLUS_EXPR:
15271 case MINUS_EXPR:
15272 case MULT_EXPR:
15273 case TRUNC_DIV_EXPR:
15274 case CEIL_DIV_EXPR:
15275 case FLOOR_DIV_EXPR:
15276 case ROUND_DIV_EXPR:
15277 case TRUNC_MOD_EXPR:
15278 case CEIL_MOD_EXPR:
15279 case FLOOR_MOD_EXPR:
15280 case ROUND_MOD_EXPR:
15281 case RDIV_EXPR:
15282 case EXACT_DIV_EXPR:
15283 case MIN_EXPR:
15284 case MAX_EXPR:
15285 case LSHIFT_EXPR:
15286 case RSHIFT_EXPR:
15287 case BIT_IOR_EXPR:
15288 case BIT_XOR_EXPR:
15289 case BIT_AND_EXPR:
15290 case NEGATE_EXPR:
15291 case ABS_EXPR:
15292 case BIT_NOT_EXPR:
15293 case NON_LVALUE_EXPR:
15294 CASE_CONVERT:
15295 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15296 return *tp;
15297 return NULL_TREE;
15298 /* And disallow anything else, except for comparisons. */
15299 default:
15300 if (COMPARISON_CLASS_P (*tp))
15301 return NULL_TREE;
15302 return *tp;
15306 /* Try to determine if the num_teams and/or thread_limit expressions
15307 can have their values determined already before entering the
15308 target construct.
15309 INTEGER_CSTs trivially are,
15310 as are integral decls that are firstprivate (explicitly or implicitly)
15311 or explicitly map(always, to:) or map(always, tofrom:) on the target
15312 region, and expressions involving simple arithmetic on those;
15313 function calls are not OK, nor is dereferencing something, etc.
15314 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15315 EXPR based on what we find:
15316 0 stands for clause not specified at all, use implementation default
15317 -1 stands for a value that can't be determined easily before entering
15318 the target construct.
15319 -2 means that no explicit teams construct was specified.
15320 If the teams construct is not present at all, use 1 for num_teams
15321 and 0 for thread_limit (only one team is involved, and the thread
15322 limit is implementation defined). */
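/* For instance (a sketch), for
       #pragma omp target
       #pragma omp teams num_teams(4) thread_limit(n)
   a NUM_TEAMS clause with upper expression 4 is added to the target,
   and THREAD_LIMIT becomes either the host-evaluated n (if computable
   before the region, e.g. firstprivate) or -1. */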
15324 static void
15325 optimize_target_teams (tree target, gimple_seq *pre_p)
15327 tree body = OMP_BODY (target);
15328 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15329 tree num_teams_lower = NULL_TREE;
15330 tree num_teams_upper = integer_zero_node;
15331 tree thread_limit = integer_zero_node;
15332 location_t num_teams_loc = EXPR_LOCATION (target);
15333 location_t thread_limit_loc = EXPR_LOCATION (target);
15334 tree c, *p, expr;
15335 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15337 if (teams == NULL_TREE)
15338 num_teams_upper = build_int_cst (integer_type_node, -2);
15339 else
15340 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15342 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15344 p = &num_teams_upper;
15345 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15346 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15348 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15349 if (TREE_CODE (expr) == INTEGER_CST)
15350 num_teams_lower = expr;
15351 else if (walk_tree (&expr, computable_teams_clause,
15352 NULL, NULL))
15353 num_teams_lower = integer_minus_one_node;
15354 else
15356 num_teams_lower = expr;
15357 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15358 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15359 is_gimple_val, fb_rvalue, false)
15360 == GS_ERROR)
15362 gimplify_omp_ctxp = target_ctx;
15363 num_teams_lower = integer_minus_one_node;
15365 else
15367 gimplify_omp_ctxp = target_ctx;
15368 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15369 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15370 = num_teams_lower;
15375 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15377 p = &thread_limit;
15378 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15380 else
15381 continue;
15382 expr = OMP_CLAUSE_OPERAND (c, 0);
15383 if (TREE_CODE (expr) == INTEGER_CST)
15385 *p = expr;
15386 continue;
15388 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15390 *p = integer_minus_one_node;
15391 continue;
15393 *p = expr;
15394 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15395 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15396 == GS_ERROR)
15398 gimplify_omp_ctxp = target_ctx;
15399 *p = integer_minus_one_node;
15400 continue;
15402 gimplify_omp_ctxp = target_ctx;
15403 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15404 OMP_CLAUSE_OPERAND (c, 0) = *p;
15406 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15408 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15409 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15410 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15411 OMP_TARGET_CLAUSES (target) = c;
15413 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15414 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15415 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15416 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15417 OMP_TARGET_CLAUSES (target) = c;
15420 /* Gimplify the gross structure of several OMP constructs. */
15422 static void
15423 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15425 tree expr = *expr_p;
15426 gimple *stmt;
15427 gimple_seq body = NULL;
15428 enum omp_region_type ort;
15430 switch (TREE_CODE (expr))
15432 case OMP_SECTIONS:
15433 case OMP_SINGLE:
15434 ort = ORT_WORKSHARE;
15435 break;
15436 case OMP_SCOPE:
15437 ort = ORT_TASKGROUP;
15438 break;
15439 case OMP_TARGET:
15440 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15441 break;
15442 case OACC_KERNELS:
15443 ort = ORT_ACC_KERNELS;
15444 break;
15445 case OACC_PARALLEL:
15446 ort = ORT_ACC_PARALLEL;
15447 break;
15448 case OACC_SERIAL:
15449 ort = ORT_ACC_SERIAL;
15450 break;
15451 case OACC_DATA:
15452 ort = ORT_ACC_DATA;
15453 break;
15454 case OMP_TARGET_DATA:
15455 ort = ORT_TARGET_DATA;
15456 break;
15457 case OMP_TEAMS:
15458 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15459 if (gimplify_omp_ctxp == NULL
15460 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15461 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15462 break;
15463 case OACC_HOST_DATA:
15464 ort = ORT_ACC_HOST_DATA;
15465 break;
15466 default:
15467 gcc_unreachable ();
15470 bool save_in_omp_construct = in_omp_construct;
15471 if ((ort & ORT_ACC) == 0)
15472 in_omp_construct = false;
15473 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15474 TREE_CODE (expr));
15475 if (TREE_CODE (expr) == OMP_TARGET)
15476 optimize_target_teams (expr, pre_p);
15477 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15478 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15480 push_gimplify_context ();
15481 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15482 if (gimple_code (g) == GIMPLE_BIND)
15483 pop_gimplify_context (g);
15484 else
15485 pop_gimplify_context (NULL);
15486 if ((ort & ORT_TARGET_DATA) != 0)
15488 enum built_in_function end_ix;
15489 switch (TREE_CODE (expr))
15491 case OACC_DATA:
15492 case OACC_HOST_DATA:
15493 end_ix = BUILT_IN_GOACC_DATA_END;
15494 break;
15495 case OMP_TARGET_DATA:
15496 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15497 break;
15498 default:
15499 gcc_unreachable ();
15501 tree fn = builtin_decl_explicit (end_ix);
15502 g = gimple_build_call (fn, 0);
15503 gimple_seq cleanup = NULL;
15504 gimple_seq_add_stmt (&cleanup, g);
15505 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15506 body = NULL;
15507 gimple_seq_add_stmt (&body, g);
15510 else
15511 gimplify_and_add (OMP_BODY (expr), &body);
15512 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15513 TREE_CODE (expr));
15514 in_omp_construct = save_in_omp_construct;
15516 switch (TREE_CODE (expr))
15518 case OACC_DATA:
15519 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15520 OMP_CLAUSES (expr));
15521 break;
15522 case OACC_HOST_DATA:
15523 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15525 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15526 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15527 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15530 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15531 OMP_CLAUSES (expr));
15532 break;
15533 case OACC_KERNELS:
15534 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15535 OMP_CLAUSES (expr));
15536 break;
15537 case OACC_PARALLEL:
15538 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15539 OMP_CLAUSES (expr));
15540 break;
15541 case OACC_SERIAL:
15542 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15543 OMP_CLAUSES (expr));
15544 break;
15545 case OMP_SECTIONS:
15546 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15547 break;
15548 case OMP_SINGLE:
15549 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15550 break;
15551 case OMP_SCOPE:
15552 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15553 break;
15554 case OMP_TARGET:
15555 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15556 OMP_CLAUSES (expr));
15557 break;
15558 case OMP_TARGET_DATA:
15559 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15560 to be evaluated before the use_device_{ptr,addr} clauses if they
15561 refer to the same variables. */
15563 tree use_device_clauses;
15564 tree *pc, *uc = &use_device_clauses;
15565 for (pc = &OMP_CLAUSES (expr); *pc; )
15566 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15567 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15569 *uc = *pc;
15570 *pc = OMP_CLAUSE_CHAIN (*pc);
15571 uc = &OMP_CLAUSE_CHAIN (*uc);
15573 else
15574 pc = &OMP_CLAUSE_CHAIN (*pc);
15575 *uc = NULL_TREE;
15576 *pc = use_device_clauses;
15577 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15578 OMP_CLAUSES (expr));
15580 break;
15581 case OMP_TEAMS:
15582 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15583 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15584 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15585 break;
15586 default:
15587 gcc_unreachable ();
15590 gimplify_seq_add_stmt (pre_p, stmt);
15591 *expr_p = NULL_TREE;
15594 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15595 target update constructs. */
15597 static void
15598 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15600 tree expr = *expr_p;
15601 int kind;
15602 gomp_target *stmt;
15603 enum omp_region_type ort = ORT_WORKSHARE;
15605 switch (TREE_CODE (expr))
15607 case OACC_ENTER_DATA:
15608 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15609 ort = ORT_ACC;
15610 break;
15611 case OACC_EXIT_DATA:
15612 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15613 ort = ORT_ACC;
15614 break;
15615 case OACC_UPDATE:
15616 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15617 ort = ORT_ACC;
15618 break;
15619 case OMP_TARGET_UPDATE:
15620 kind = GF_OMP_TARGET_KIND_UPDATE;
15621 break;
15622 case OMP_TARGET_ENTER_DATA:
15623 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15624 break;
15625 case OMP_TARGET_EXIT_DATA:
15626 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15627 break;
15628 default:
15629 gcc_unreachable ();
15631 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15632 ort, TREE_CODE (expr));
15633 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15634 TREE_CODE (expr));
15635 if (TREE_CODE (expr) == OACC_UPDATE
15636 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15637 OMP_CLAUSE_IF_PRESENT))
15639 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15640 clause. */
15641 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15642 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15643 switch (OMP_CLAUSE_MAP_KIND (c))
15645 case GOMP_MAP_FORCE_TO:
15646 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15647 break;
15648 case GOMP_MAP_FORCE_FROM:
15649 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15650 break;
15651 default:
15652 break;
15655 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15656 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15657 OMP_CLAUSE_FINALIZE))
15659 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15660 semantics. */
15661 bool have_clause = false;
15662 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15663 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15664 switch (OMP_CLAUSE_MAP_KIND (c))
15666 case GOMP_MAP_FROM:
15667 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15668 have_clause = true;
15669 break;
15670 case GOMP_MAP_RELEASE:
15671 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15672 have_clause = true;
15673 break;
15674 case GOMP_MAP_TO_PSET:
15675 /* Fortran arrays with descriptors must map that descriptor when
15676 doing standalone "attach" operations (in OpenACC). In that
15677 case GOMP_MAP_TO_PSET appears by itself with no preceding
15678 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15679 break;
15680 case GOMP_MAP_POINTER:
15681 /* TODO PR92929: we may see these here, but they'll always follow
15682 one of the clauses above, and will be handled by libgomp as
15683 one group, so no handling required here. */
15684 gcc_assert (have_clause);
15685 break;
15686 case GOMP_MAP_DETACH:
15687 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15688 have_clause = false;
15689 break;
15690 case GOMP_MAP_STRUCT:
15691 have_clause = false;
15692 break;
15693 default:
15694 gcc_unreachable ();
15697 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15699 gimplify_seq_add_stmt (pre_p, stmt);
15700 *expr_p = NULL_TREE;
15703 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15704 stabilized the lhs of the atomic operation as *ADDR. Return true if
15705 EXPR is this stabilized form. */
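/* E.g. for an atomic update of x, ADDR is typically &x and EXPR may
   appear as *&x, possibly wrapped in casts to type variants (a sketch;
   the exact trees depend on the front end). */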
15707 static bool
15708 goa_lhs_expr_p (tree expr, tree addr)
15710 /* Also include casts to other type variants. The C front end is fond
15711 of adding these for e.g. volatile variables. This is like
15712 STRIP_TYPE_NOPS but includes the main variant lookup. */
15713 STRIP_USELESS_TYPE_CONVERSION (expr);
15715 if (INDIRECT_REF_P (expr))
15717 expr = TREE_OPERAND (expr, 0);
15718 while (expr != addr
15719 && (CONVERT_EXPR_P (expr)
15720 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15721 && TREE_CODE (expr) == TREE_CODE (addr)
15722 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15724 expr = TREE_OPERAND (expr, 0);
15725 addr = TREE_OPERAND (addr, 0);
15727 if (expr == addr)
15728 return true;
15729 return (TREE_CODE (addr) == ADDR_EXPR
15730 && TREE_CODE (expr) == ADDR_EXPR
15731 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15733 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15734 return true;
15735 return false;
15738 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15739 expression does not involve the lhs, evaluate it into a temporary.
15740 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15741 or -1 if an error was encountered. */
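/* For instance (a sketch), stabilizing the rhs 'x + foo ()' of an
   atomic update of x: the occurrence of x is replaced by LHS_VAR, and
   foo (), which does not involve the lhs, is evaluated into a
   temporary in PRE_P; the function then returns 1 because the lhs
   appeared as a subexpression. */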
15743 static int
15744 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15745 tree lhs_var, tree &target_expr, bool rhs, int depth)
15747 tree expr = *expr_p;
15748 int saw_lhs = 0;
15750 if (goa_lhs_expr_p (expr, lhs_addr))
15752 if (pre_p)
15753 *expr_p = lhs_var;
15754 return 1;
15756 if (is_gimple_val (expr))
15757 return 0;
15759 /* The maximum depth at which the lhs occurs in an expression is for the
15760 __builtin_clear_padding (...), __builtin_clear_padding (...),
15761 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; case. */
15762 if (++depth > 7)
15763 goto finish;
15765 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15767 case tcc_binary:
15768 case tcc_comparison:
15769 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15770 lhs_var, target_expr, true, depth);
15771 /* FALLTHRU */
15772 case tcc_unary:
15773 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15774 lhs_var, target_expr, true, depth);
15775 break;
15776 case tcc_expression:
15777 switch (TREE_CODE (expr))
15779 case TRUTH_ANDIF_EXPR:
15780 case TRUTH_ORIF_EXPR:
15781 case TRUTH_AND_EXPR:
15782 case TRUTH_OR_EXPR:
15783 case TRUTH_XOR_EXPR:
15784 case BIT_INSERT_EXPR:
15785 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15786 lhs_addr, lhs_var, target_expr, true,
15787 depth);
15788 /* FALLTHRU */
15789 case TRUTH_NOT_EXPR:
15790 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15791 lhs_addr, lhs_var, target_expr, true,
15792 depth);
15793 break;
15794 case MODIFY_EXPR:
15795 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15796 target_expr, true, depth))
15797 break;
15798 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15799 lhs_addr, lhs_var, target_expr, true,
15800 depth);
15801 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15802 lhs_addr, lhs_var, target_expr, false,
15803 depth);
15804 break;
15805 /* FALLTHRU */
15806 case ADDR_EXPR:
15807 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15808 target_expr, true, depth))
15809 break;
15810 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15811 lhs_addr, lhs_var, target_expr, false,
15812 depth);
15813 break;
15814 case COMPOUND_EXPR:
15815 /* Break out any preevaluations from cp_build_modify_expr. */
15816 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15817 expr = TREE_OPERAND (expr, 1))
15819 /* Special-case __builtin_clear_padding call before
15820 __builtin_memcmp. */
15821 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15823 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15824 if (fndecl
15825 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15826 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15827 && (!pre_p
15828 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15829 lhs_addr, lhs_var,
15830 target_expr, true, depth)))
15832 if (pre_p)
15833 *expr_p = expr;
15834 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15835 pre_p, lhs_addr, lhs_var,
15836 target_expr, true, depth);
15837 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15838 pre_p, lhs_addr, lhs_var,
15839 target_expr, rhs, depth);
15840 return saw_lhs;
15844 if (pre_p)
15845 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15847 if (!pre_p)
15848 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15849 target_expr, rhs, depth);
15850 *expr_p = expr;
15851 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15852 target_expr, rhs, depth);
15853 case COND_EXPR:
15854 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15855 lhs_var, target_expr, true, depth))
15856 break;
15857 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15858 lhs_addr, lhs_var, target_expr, true,
15859 depth);
15860 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15861 lhs_addr, lhs_var, target_expr, true,
15862 depth);
15863 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15864 lhs_addr, lhs_var, target_expr, true,
15865 depth);
15866 break;
15867 case TARGET_EXPR:
15868 if (TARGET_EXPR_INITIAL (expr))
15870 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15871 lhs_var, target_expr, true,
15872 depth))
15873 break;
15874 if (expr == target_expr)
15875 saw_lhs = 1;
15876 else
15878 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15879 pre_p, lhs_addr, lhs_var,
15880 target_expr, true, depth);
15881 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15882 target_expr = expr;
15885 break;
15886 default:
15887 break;
15889 break;
15890 case tcc_reference:
15891 if (TREE_CODE (expr) == BIT_FIELD_REF
15892 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15893 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15894 lhs_addr, lhs_var, target_expr, true,
15895 depth);
15896 break;
15897 case tcc_vl_exp:
15898 if (TREE_CODE (expr) == CALL_EXPR)
15900 if (tree fndecl = get_callee_fndecl (expr))
15901 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
15902 BUILT_IN_MEMCMP))
15904 int nargs = call_expr_nargs (expr);
15905 for (int i = 0; i < nargs; i++)
15906 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15907 pre_p, lhs_addr, lhs_var,
15908 target_expr, true, depth);
15911 break;
15912 default:
15913 break;
15916 finish:
15917 if (saw_lhs == 0 && pre_p)
15919 enum gimplify_status gs;
15920 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
15922 gimplify_stmt (&expr, pre_p);
15923 return saw_lhs;
15925 else if (rhs)
15926 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
15927 else
15928 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
15929 if (gs != GS_ALL_DONE)
15930 saw_lhs = -1;
15933 return saw_lhs;
15936 /* Gimplify an OMP_ATOMIC statement. */
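/* A sketch of the resulting sequence for '#pragma omp atomic' applied
   to 'x += 1' (illustrative only, not exact tuple syntax):
       tmp = GIMPLE_OMP_ATOMIC_LOAD (&x);
       t = tmp + 1;
       GIMPLE_OMP_ATOMIC_STORE (t);
   where the rhs computation is gimplified into the preceding
   statements. */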
15938 static enum gimplify_status
15939 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
15941 tree addr = TREE_OPERAND (*expr_p, 0);
15942 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
15943 ? NULL : TREE_OPERAND (*expr_p, 1);
15944 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
15945 tree tmp_load;
15946 gomp_atomic_load *loadstmt;
15947 gomp_atomic_store *storestmt;
15948 tree target_expr = NULL_TREE;
15950 tmp_load = create_tmp_reg (type);
15951 if (rhs
15952 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
15953 true, 0) < 0)
15954 return GS_ERROR;
15956 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
15957 != GS_ALL_DONE)
15958 return GS_ERROR;
15960 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
15961 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15962 gimplify_seq_add_stmt (pre_p, loadstmt);
15963 if (rhs)
15965 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15966 representatives. Use BIT_FIELD_REF on the lhs instead. */
15967 tree rhsarg = rhs;
15968 if (TREE_CODE (rhs) == COND_EXPR)
15969 rhsarg = TREE_OPERAND (rhs, 1);
15970 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
15971 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
15973 tree bitpos = TREE_OPERAND (rhsarg, 2);
15974 tree op1 = TREE_OPERAND (rhsarg, 1);
15975 tree bitsize;
15976 tree tmp_store = tmp_load;
15977 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
15978 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
15979 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
15980 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
15981 else
15982 bitsize = TYPE_SIZE (TREE_TYPE (op1));
15983 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
15984 tree t = build2_loc (EXPR_LOCATION (rhsarg),
15985 MODIFY_EXPR, void_type_node,
15986 build3_loc (EXPR_LOCATION (rhsarg),
15987 BIT_FIELD_REF, TREE_TYPE (op1),
15988 tmp_store, bitsize, bitpos), op1);
15989 if (TREE_CODE (rhs) == COND_EXPR)
15990 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
15991 TREE_OPERAND (rhs, 0), t, void_node);
15992 gimplify_and_add (t, pre_p);
15993 rhs = tmp_store;
15995 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
15996 if (TREE_CODE (rhs) == COND_EXPR)
15997 gimplify_ctxp->allow_rhs_cond_expr = true;
15998 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
15999 is_gimple_val, fb_rvalue);
16000 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
16001 if (gs != GS_ALL_DONE)
16002 return GS_ERROR;
16005 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
16006 rhs = tmp_load;
16007 storestmt
16008 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
16009 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
16011 gimple_omp_atomic_set_weak (loadstmt);
16012 gimple_omp_atomic_set_weak (storestmt);
16014 gimplify_seq_add_stmt (pre_p, storestmt);
16015 switch (TREE_CODE (*expr_p))
16017 case OMP_ATOMIC_READ:
16018 case OMP_ATOMIC_CAPTURE_OLD:
16019 *expr_p = tmp_load;
16020 gimple_omp_atomic_set_need_value (loadstmt);
16021 break;
16022 case OMP_ATOMIC_CAPTURE_NEW:
16023 *expr_p = rhs;
16024 gimple_omp_atomic_set_need_value (storestmt);
16025 break;
16026 default:
16027 *expr_p = NULL;
16028 break;
16031 return GS_ALL_DONE;
16034 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
16035 body, and adding some EH bits. */
16037 static enum gimplify_status
16038 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
16040 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
16041 gimple *body_stmt;
16042 gtransaction *trans_stmt;
16043 gimple_seq body = NULL;
16044 int subcode = 0;
16046 /* Wrap the transaction body in a BIND_EXPR so we have a context
16047 in which to put decls for OMP. */
16048 if (TREE_CODE (tbody) != BIND_EXPR)
16050 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
16051 TREE_SIDE_EFFECTS (bind) = 1;
16052 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
16053 TRANSACTION_EXPR_BODY (expr) = bind;
16056 push_gimplify_context ();
16057 temp = voidify_wrapper_expr (*expr_p, NULL);
16059 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
16060 pop_gimplify_context (body_stmt);
16062 trans_stmt = gimple_build_transaction (body);
16063 if (TRANSACTION_EXPR_OUTER (expr))
16064 subcode = GTMA_IS_OUTER;
16065 else if (TRANSACTION_EXPR_RELAXED (expr))
16066 subcode = GTMA_IS_RELAXED;
16067 gimple_transaction_set_subcode (trans_stmt, subcode);
16069 gimplify_seq_add_stmt (pre_p, trans_stmt);
16071 if (temp)
16073 *expr_p = temp;
16074 return GS_OK;
16077 *expr_p = NULL_TREE;
16078 return GS_ALL_DONE;
16081 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
16082 is the OMP_BODY of the original EXPR (which has already been
16083 gimplified so it's not present in the EXPR).
16085 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
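/* For example (a sketch), inside a doubly nested loop with an
   ordered(2) clause,
       #pragma omp ordered doacross(sink: i - 1, j)
   has its sink vector validated here against the two outermost
   iteration variables recorded in loop_iter_var. */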
16087 static gimple *
16088 gimplify_omp_ordered (tree expr, gimple_seq body)
16090 tree c, decls;
16091 int failures = 0;
16092 unsigned int i;
16093 tree source_c = NULL_TREE;
16094 tree sink_c = NULL_TREE;
16096 if (gimplify_omp_ctxp)
16098 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16099 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16100 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16102 error_at (OMP_CLAUSE_LOCATION (c),
16103 "%<ordered%> construct with %qs clause must be "
16104 "closely nested inside a loop with %<ordered%> clause",
16105 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16106 failures++;
16108 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16109 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16111 bool fail = false;
16112 sink_c = c;
16113 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16114 continue; /* omp_cur_iteration - 1 */
16115 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16116 decls && TREE_CODE (decls) == TREE_LIST;
16117 decls = TREE_CHAIN (decls), ++i)
16118 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16119 continue;
16120 else if (TREE_VALUE (decls)
16121 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16123 error_at (OMP_CLAUSE_LOCATION (c),
16124 "variable %qE is not an iteration "
16125 "of outermost loop %d, expected %qE",
16126 TREE_VALUE (decls), i + 1,
16127 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16128 fail = true;
16129 failures++;
16131 else
16132 TREE_VALUE (decls)
16133 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16134 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16136 error_at (OMP_CLAUSE_LOCATION (c),
16137 "number of variables in %qs clause with "
16138 "%<sink%> modifier does not match number of "
16139 "iteration variables",
16140 OMP_CLAUSE_DOACROSS_DEPEND (c)
16141 ? "depend" : "doacross");
16142 failures++;
16145 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16146 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16148 if (source_c)
16150 error_at (OMP_CLAUSE_LOCATION (c),
16151 "more than one %qs clause with %<source%> "
16152 "modifier on an %<ordered%> construct",
16153 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16154 ? "depend" : "doacross");
16155 failures++;
16157 else
16158 source_c = c;
16161 if (source_c && sink_c)
16163 error_at (OMP_CLAUSE_LOCATION (source_c),
16164 "%qs clause with %<source%> modifier specified "
16165 "together with %qs clauses with %<sink%> modifier "
16166 "on the same construct",
16167 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16168 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16169 failures++;
16172 if (failures)
16173 return gimple_build_nop ();
16174 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16177 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16178 expression produces a value to be used as an operand inside a GIMPLE
16179 statement, the value will be stored back in *EXPR_P. This value will
16180 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16181 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16182 emitted in PRE_P and POST_P.
16184 Additionally, this process may overwrite parts of the input
16185 expression during gimplification. Ideally, it should be
16186 possible to do non-destructive gimplification.
16188 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16189 the expression needs to evaluate to a value to be used as
16190 an operand in a GIMPLE statement, this value will be stored in
16191 *EXPR_P on exit. This happens when the caller specifies one
16192 of fb_lvalue or fb_rvalue fallback flags.
16194 PRE_P will contain the sequence of GIMPLE statements corresponding
16195 to the evaluation of EXPR and all the side-effects that must
16196 be executed before the main expression. On exit, the last
16197 statement of PRE_P is the core statement being gimplified. For
16198 instance, when gimplifying 'if (++a)' the last statement in
16199 PRE_P will be 'if (t.1)' where t.1 is the result of
16200 pre-incrementing 'a'.
16202 POST_P will contain the sequence of GIMPLE statements corresponding
16203 to the evaluation of all the side-effects that must be executed
16204 after the main expression. If this is NULL, the post
16205 side-effects are stored at the end of PRE_P.
16207 The reason why the output is split in two is to handle post
16208 side-effects explicitly. In some cases, an expression may have
16209 inner and outer post side-effects which need to be emitted in
16210 an order different from the one given by the recursive
16211 traversal. For instance, for the expression (*p--)++ the post
16212 side-effects of '--' must actually occur *after* the post
16213 side-effects of '++'. However, gimplification will first visit
16214 the inner expression, so if a separate POST sequence was not
16215 used, the resulting sequence would be:
16217 1 t.1 = *p
16218 2 p = p - 1
16219 3 t.2 = t.1 + 1
16220 4 *p = t.2
16222 However, the post-decrement operation in line #2 must not be
16223 evaluated until after the store to *p at line #4, so the
16224 correct sequence should be:
16226 1 t.1 = *p
16227 2 t.2 = t.1 + 1
16228 3 *p = t.2
16229 4 p = p - 1
16231 So, by specifying a separate post queue, it is possible
16232 to emit the post side-effects in the correct order.
16233 If POST_P is NULL, an internal queue will be used. Before
16234 returning to the caller, the sequence POST_P is appended to
16235 the main output sequence PRE_P.
16237 GIMPLE_TEST_F points to a function that takes a tree T and
16238 returns nonzero if T is in the GIMPLE form requested by the
16239 caller. The GIMPLE predicates are in gimple.cc.
16241 FALLBACK tells the function what sort of a temporary we want if
16242 gimplification cannot produce an expression that complies with
16243 GIMPLE_TEST_F.
16245 fb_none means that no temporary should be generated
16246 fb_rvalue means that an rvalue is OK to generate
16247 fb_lvalue means that an lvalue is OK to generate
16248 fb_either means that either is OK, but an lvalue is preferable.
16249 fb_mayfail means that gimplification may fail (in which case
16250 GS_ERROR will be returned)
16252 The return value is either GS_ERROR or GS_ALL_DONE, since this
16253 function iterates until EXPR is completely gimplified or an error
16254 occurs. */
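/* A typical invocation (a sketch): to reduce EXPR to a GIMPLE value,
       enum gimplify_status gs
         = gimplify_expr (&expr, pre_p, post_p, is_gimple_val, fb_rvalue);
   which emits any needed statements into *PRE_P/*POST_P and, on
   success, leaves a simple operand in expr. */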
16256 enum gimplify_status
16257 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16258 bool (*gimple_test_f) (tree), fallback_t fallback)
16260 tree tmp;
16261 gimple_seq internal_pre = NULL;
16262 gimple_seq internal_post = NULL;
16263 tree save_expr;
16264 bool is_statement;
16265 location_t saved_location;
16266 enum gimplify_status ret;
16267 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16268 tree label;
16270 save_expr = *expr_p;
16271 if (save_expr == NULL_TREE)
16272 return GS_ALL_DONE;
16274 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16275 is_statement = gimple_test_f == is_gimple_stmt;
16276 if (is_statement)
16277 gcc_assert (pre_p);
16279 /* Consistency checks. */
16280 if (gimple_test_f == is_gimple_reg)
16281 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16282 else if (gimple_test_f == is_gimple_val
16283 || gimple_test_f == is_gimple_call_addr
16284 || gimple_test_f == is_gimple_condexpr_for_cond
16285 || gimple_test_f == is_gimple_mem_rhs
16286 || gimple_test_f == is_gimple_mem_rhs_or_call
16287 || gimple_test_f == is_gimple_reg_rhs
16288 || gimple_test_f == is_gimple_reg_rhs_or_call
16289 || gimple_test_f == is_gimple_asm_val
16290 || gimple_test_f == is_gimple_mem_ref_addr)
16291 gcc_assert (fallback & fb_rvalue);
16292 else if (gimple_test_f == is_gimple_min_lval
16293 || gimple_test_f == is_gimple_lvalue)
16294 gcc_assert (fallback & fb_lvalue);
16295 else if (gimple_test_f == is_gimple_addressable)
16296 gcc_assert (fallback & fb_either);
16297 else if (gimple_test_f == is_gimple_stmt)
16298 gcc_assert (fallback == fb_none);
16299 else
16301 /* We should have recognized the GIMPLE_TEST_F predicate to
16302 know what kind of fallback to use in case a temporary is
16303 needed to hold the value or address of *EXPR_P. */
16304 gcc_unreachable ();
16307 /* We used to check the predicate here and return immediately if it
16308 succeeds. This is wrong; the design is for gimplification to be
16309 idempotent, and for the predicates to only test for valid forms, not
16310 whether they are fully simplified. */
16311 if (pre_p == NULL)
16312 pre_p = &internal_pre;
16314 if (post_p == NULL)
16315 post_p = &internal_post;
16317 /* Remember the last statements added to PRE_P and POST_P. Every
16318 new statement added by the gimplification helpers needs to be
16319 annotated with location information. To centralize the
16320 responsibility, we remember the last statement that had been
16321 added to both queues before gimplifying *EXPR_P. If
16322 gimplification produces new statements in PRE_P and POST_P, those
16323 statements will be annotated with the same location information
16324 as *EXPR_P. */
16325 pre_last_gsi = gsi_last (*pre_p);
16326 post_last_gsi = gsi_last (*post_p);
16328 saved_location = input_location;
16329 if (save_expr != error_mark_node
16330 && EXPR_HAS_LOCATION (*expr_p))
16331 input_location = EXPR_LOCATION (*expr_p);
16333 /* Loop over the specific gimplifiers until the toplevel node
16334 remains the same. */
16337 /* Strip away as many useless type conversions as possible
16338 at the toplevel. */
16339 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16341 /* Remember the expr. */
16342 save_expr = *expr_p;
16344 /* Die, die, die, my darling. */
16345 if (error_operand_p (save_expr))
16347 ret = GS_ERROR;
16348 break;
16351 /* Do any language-specific gimplification. */
16352 ret = ((enum gimplify_status)
16353 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16354 if (ret == GS_OK)
16356 if (*expr_p == NULL_TREE)
16357 break;
16358 if (*expr_p != save_expr)
16359 continue;
16361 else if (ret != GS_UNHANDLED)
16362 break;
16364 /* Make sure that all the cases set 'ret' appropriately. */
16365 ret = GS_UNHANDLED;
16366 switch (TREE_CODE (*expr_p))
16368 /* First deal with the special cases. */
16370 case POSTINCREMENT_EXPR:
16371 case POSTDECREMENT_EXPR:
16372 case PREINCREMENT_EXPR:
16373 case PREDECREMENT_EXPR:
16374 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16375 fallback != fb_none,
16376 TREE_TYPE (*expr_p));
16377 break;
16379 case VIEW_CONVERT_EXPR:
16380 if ((fallback & fb_rvalue)
16381 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16382 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16384 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16385 post_p, is_gimple_val, fb_rvalue);
16386 recalculate_side_effects (*expr_p);
16387 break;
16389 /* Fallthru. */
16391 case ARRAY_REF:
16392 case ARRAY_RANGE_REF:
16393 case REALPART_EXPR:
16394 case IMAGPART_EXPR:
16395 case COMPONENT_REF:
16396 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16397 fallback ? fallback : fb_rvalue);
16398 break;
16400 case COND_EXPR:
16401 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16403 /* C99 code may assign to an array in a structure value of a
16404 conditional expression, and this has undefined behavior
16405 only on execution, so create a temporary if an lvalue is
16406 required. */
16407 if (fallback == fb_lvalue)
16409 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16410 mark_addressable (*expr_p);
16411 ret = GS_OK;
16413 break;
16415 case CALL_EXPR:
16416 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16418 /* C99 code may assign to an array in a structure returned
16419 from a function, and this has undefined behavior only on
16420 execution, so create a temporary if an lvalue is
16421 required. */
16422 if (fallback == fb_lvalue)
16424 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16425 mark_addressable (*expr_p);
16426 ret = GS_OK;
16428 break;
16430 case TREE_LIST:
16431 gcc_unreachable ();
16433 case COMPOUND_EXPR:
16434 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16435 break;
16437 case COMPOUND_LITERAL_EXPR:
16438 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16439 gimple_test_f, fallback);
16440 break;
16442 case MODIFY_EXPR:
16443 case INIT_EXPR:
16444 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16445 fallback != fb_none);
16446 break;
16448 case TRUTH_ANDIF_EXPR:
16449 case TRUTH_ORIF_EXPR:
16451 /* Preserve the original type of the expression and the
16452 source location of the outer expression. */
16453 tree org_type = TREE_TYPE (*expr_p);
16454 *expr_p = gimple_boolify (*expr_p);
16455 *expr_p = build3_loc (input_location, COND_EXPR,
16456 org_type, *expr_p,
16457 fold_convert_loc
16458 (input_location,
16459 org_type, boolean_true_node),
16460 fold_convert_loc
16461 (input_location,
16462 org_type, boolean_false_node));
16463 ret = GS_OK;
16464 break;
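/* E.g. "a && b" becomes the COND_EXPR "a && b ? 1 : 0" in the original
type; since we return GS_OK, the next iteration of the gimplification
loop sees a COND_EXPR and lowers the short-circuit condition into
explicit control flow via gimplify_cond_expr above. */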
16467 case TRUTH_NOT_EXPR:
16469 tree type = TREE_TYPE (*expr_p);
16470 /* The parsers are careful to generate TRUTH_NOT_EXPR
16471 only with operands that are always zero or one.
16472 We do not fold here but handle the only interesting case
16473 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16474 *expr_p = gimple_boolify (*expr_p);
16475 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16476 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16477 TREE_TYPE (*expr_p),
16478 TREE_OPERAND (*expr_p, 0));
16479 else
16480 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16481 TREE_TYPE (*expr_p),
16482 TREE_OPERAND (*expr_p, 0),
16483 build_int_cst (TREE_TYPE (*expr_p), 1));
16484 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16485 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16486 ret = GS_OK;
16487 break;
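/* I.e. for a 1-bit-precision operand "!x" becomes "~x", which flips
0 and 1; for wider boolified operands, known to be 0 or 1, it becomes
"x ^ 1". Either way no TRUTH_NOT_EXPR survives for fold to recreate. */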
16490 case ADDR_EXPR:
16491 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16492 break;
16494 case ANNOTATE_EXPR:
16496 tree cond = TREE_OPERAND (*expr_p, 0);
16497 tree kind = TREE_OPERAND (*expr_p, 1);
16498 tree data = TREE_OPERAND (*expr_p, 2);
16499 tree type = TREE_TYPE (cond);
16500 if (!INTEGRAL_TYPE_P (type))
16502 *expr_p = cond;
16503 ret = GS_OK;
16504 break;
16506 tree tmp = create_tmp_var (type);
16507 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16508 gcall *call
16509 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16510 gimple_call_set_lhs (call, tmp);
16511 gimplify_seq_add_stmt (pre_p, call);
16512 *expr_p = tmp;
16513 ret = GS_ALL_DONE;
16514 break;
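/* The annotation now travels as "tmp = IFN_ANNOTATE (cond, kind, data)"
so later passes can still find it attached to the guarding condition;
for non-integral conditions the annotation is simply dropped above and
the bare condition kept. */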
16517 case VA_ARG_EXPR:
16518 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16519 break;
16521 CASE_CONVERT:
16522 if (IS_EMPTY_STMT (*expr_p))
16524 ret = GS_ALL_DONE;
16525 break;
16528 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16529 || fallback == fb_none)
16531 /* Just strip a conversion to void (or in void context) and
16532 try again. */
16533 *expr_p = TREE_OPERAND (*expr_p, 0);
16534 ret = GS_OK;
16535 break;
16538 ret = gimplify_conversion (expr_p);
16539 if (ret == GS_ERROR)
16540 break;
16541 if (*expr_p != save_expr)
16542 break;
16543 /* FALLTHRU */
16545 case FIX_TRUNC_EXPR:
16546 /* unary_expr: ... | '(' cast ')' val | ... */
16547 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16548 is_gimple_val, fb_rvalue);
16549 recalculate_side_effects (*expr_p);
16550 break;
16552 case INDIRECT_REF:
16554 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16555 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16556 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16558 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16559 if (*expr_p != save_expr)
16561 ret = GS_OK;
16562 break;
16565 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16566 is_gimple_reg, fb_rvalue);
16567 if (ret == GS_ERROR)
16568 break;
16570 recalculate_side_effects (*expr_p);
16571 *expr_p = fold_build2_loc (input_location, MEM_REF,
16572 TREE_TYPE (*expr_p),
16573 TREE_OPERAND (*expr_p, 0),
16574 build_int_cst (saved_ptr_type, 0));
16575 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16576 TREE_THIS_NOTRAP (*expr_p) = notrap;
16577 ret = GS_OK;
16578 break;
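/* So "*p" has been rewritten as "MEM_REF[p, 0]"; the zero offset is
built with the original pointer type, which is how a MEM_REF records
the type to be used for alias analysis. */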
16581 /* We arrive here through the various re-gimplification paths. */
16582 case MEM_REF:
16583 /* First try re-folding the whole thing. */
16584 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16585 TREE_OPERAND (*expr_p, 0),
16586 TREE_OPERAND (*expr_p, 1));
16587 if (tmp)
16589 REF_REVERSE_STORAGE_ORDER (tmp)
16590 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16591 *expr_p = tmp;
16592 recalculate_side_effects (*expr_p);
16593 ret = GS_OK;
16594 break;
16596 /* Avoid re-gimplifying the address operand if it is already
16597 in suitable form. Re-gimplifying would mark the address
16598 operand addressable. Always gimplify when not in SSA form
16599 as we still may have to gimplify decls with value-exprs. */
16600 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16601 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16603 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16604 is_gimple_mem_ref_addr, fb_rvalue);
16605 if (ret == GS_ERROR)
16606 break;
16608 recalculate_side_effects (*expr_p);
16609 ret = GS_ALL_DONE;
16610 break;
16612 /* Constants need not be gimplified. */
16613 case INTEGER_CST:
16614 case REAL_CST:
16615 case FIXED_CST:
16616 case STRING_CST:
16617 case COMPLEX_CST:
16618 case VECTOR_CST:
16619 /* Drop the overflow flag on constants; we do not want
16620 that in the GIMPLE IL. */
16621 if (TREE_OVERFLOW_P (*expr_p))
16622 *expr_p = drop_tree_overflow (*expr_p);
16623 ret = GS_ALL_DONE;
16624 break;
16626 case CONST_DECL:
16627 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16628 CONST_DECL node. Otherwise the decl is replaceable by its
16629 value. */
16630 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16631 if (fallback & fb_lvalue)
16632 ret = GS_ALL_DONE;
16633 else
16635 *expr_p = DECL_INITIAL (*expr_p);
16636 ret = GS_OK;
16638 break;
16640 case DECL_EXPR:
16641 ret = gimplify_decl_expr (expr_p, pre_p);
16642 break;
16644 case BIND_EXPR:
16645 ret = gimplify_bind_expr (expr_p, pre_p);
16646 break;
16648 case LOOP_EXPR:
16649 ret = gimplify_loop_expr (expr_p, pre_p);
16650 break;
16652 case SWITCH_EXPR:
16653 ret = gimplify_switch_expr (expr_p, pre_p);
16654 break;
16656 case EXIT_EXPR:
16657 ret = gimplify_exit_expr (expr_p);
16658 break;
16660 case GOTO_EXPR:
16661 /* If the target is not a LABEL_DECL, then it is a computed jump
16662 and the target needs to be gimplified. */
16663 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16665 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16666 NULL, is_gimple_val, fb_rvalue);
16667 if (ret == GS_ERROR)
16668 break;
16670 gimplify_seq_add_stmt (pre_p,
16671 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16672 ret = GS_ALL_DONE;
16673 break;
16675 case PREDICT_EXPR:
16676 gimplify_seq_add_stmt (pre_p,
16677 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16678 PREDICT_EXPR_OUTCOME (*expr_p)));
16679 ret = GS_ALL_DONE;
16680 break;
16682 case LABEL_EXPR:
16683 ret = gimplify_label_expr (expr_p, pre_p);
16684 label = LABEL_EXPR_LABEL (*expr_p);
16685 gcc_assert (decl_function_context (label) == current_function_decl);
16687 /* If the label is used in a goto statement, or the address of the
16688 label is taken, we need to unpoison all variables that were seen
16689 so far. Doing so prevents us from reporting false positives. */
16690 if (asan_poisoned_variables
16691 && asan_used_labels != NULL
16692 && asan_used_labels->contains (label)
16693 && !gimplify_omp_ctxp)
16694 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16695 break;
16697 case CASE_LABEL_EXPR:
16698 ret = gimplify_case_label_expr (expr_p, pre_p);
16700 if (gimplify_ctxp->live_switch_vars)
16701 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16702 pre_p);
16703 break;
16705 case RETURN_EXPR:
16706 ret = gimplify_return_expr (*expr_p, pre_p);
16707 break;
16709 case CONSTRUCTOR:
16710 /* Don't reduce this in place; let gimplify_init_constructor work its
16711 magic. But if we're just elaborating this for side effects, just
16712 gimplify any element that has side-effects. */
16713 if (fallback == fb_none)
16715 unsigned HOST_WIDE_INT ix;
16716 tree val;
16717 tree temp = NULL_TREE;
16718 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16719 if (TREE_SIDE_EFFECTS (val))
16720 append_to_statement_list (val, &temp);
16722 *expr_p = temp;
16723 ret = temp ? GS_OK : GS_ALL_DONE;
16725 /* C99 code may assign to an array in a constructed
16726 structure or union, and this has undefined behavior only
16727 on execution, so create a temporary if an lvalue is
16728 required. */
16729 else if (fallback == fb_lvalue)
16731 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16732 mark_addressable (*expr_p);
16733 ret = GS_OK;
16735 else
16736 ret = GS_ALL_DONE;
16737 break;
16739 /* The following are special cases that are not handled by the
16740 original GIMPLE grammar. */
16742 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16743 eliminated. */
16744 case SAVE_EXPR:
16745 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16746 break;
16748 case BIT_FIELD_REF:
16749 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16750 post_p, is_gimple_lvalue, fb_either);
16751 recalculate_side_effects (*expr_p);
16752 break;
16754 case TARGET_MEM_REF:
16756 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16758 if (TMR_BASE (*expr_p))
16759 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16760 post_p, is_gimple_mem_ref_addr, fb_either);
16761 if (TMR_INDEX (*expr_p))
16762 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16763 post_p, is_gimple_val, fb_rvalue);
16764 if (TMR_INDEX2 (*expr_p))
16765 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16766 post_p, is_gimple_val, fb_rvalue);
16767 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16768 ret = MIN (r0, r1);
16770 break;
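/* MIN works here because gimplify_status values are ordered from worst
to best (GS_ERROR < GS_UNHANDLED < GS_OK < GS_ALL_DONE), so combining
two statuses with MIN keeps the more pessimistic one. */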
16772 case NON_LVALUE_EXPR:
16773 /* This should have been stripped above. */
16774 gcc_unreachable ();
16776 case ASM_EXPR:
16777 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16778 break;
16780 case TRY_FINALLY_EXPR:
16781 case TRY_CATCH_EXPR:
16783 gimple_seq eval, cleanup;
16784 gtry *try_;
16786 /* Calls to destructors are generated automatically in FINALLY/CATCH
16787 blocks. Their location should be UNKNOWN_LOCATION. However,
16788 gimplify_call_expr will reset these call stmts to input_location
16789 if it finds a stmt's location is unknown. To prevent that resetting
16790 for destructors, we set input_location to unknown here.
16791 Note that this only affects the destructor calls in FINALLY/CATCH
16792 blocks; input_location automatically resets to its original value
16793 by the end of gimplify_expr. */
16794 input_location = UNKNOWN_LOCATION;
16795 eval = cleanup = NULL;
16796 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16797 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16798 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16800 gimple_seq n = NULL, e = NULL;
16801 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16802 0), &n);
16803 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16804 1), &e);
16805 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16807 geh_else *stmt = gimple_build_eh_else (n, e);
16808 gimple_seq_add_stmt (&cleanup, stmt);
16811 else
16812 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16813 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16814 if (gimple_seq_empty_p (cleanup))
16816 gimple_seq_add_seq (pre_p, eval);
16817 ret = GS_ALL_DONE;
16818 break;
16820 try_ = gimple_build_try (eval, cleanup,
16821 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16822 ? GIMPLE_TRY_FINALLY
16823 : GIMPLE_TRY_CATCH);
16824 if (EXPR_HAS_LOCATION (save_expr))
16825 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16826 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16827 gimple_set_location (try_, saved_location);
16828 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16829 gimple_try_set_catch_is_cleanup (try_,
16830 TRY_CATCH_IS_CLEANUP (*expr_p));
16831 gimplify_seq_add_stmt (pre_p, try_);
16832 ret = GS_ALL_DONE;
16833 break;
16836 case CLEANUP_POINT_EXPR:
16837 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16838 break;
16840 case TARGET_EXPR:
16841 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16842 break;
16844 case CATCH_EXPR:
16846 gimple *c;
16847 gimple_seq handler = NULL;
16848 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16849 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16850 gimplify_seq_add_stmt (pre_p, c);
16851 ret = GS_ALL_DONE;
16852 break;
16855 case EH_FILTER_EXPR:
16857 gimple *ehf;
16858 gimple_seq failure = NULL;
16860 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16861 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16862 copy_warning (ehf, *expr_p);
16863 gimplify_seq_add_stmt (pre_p, ehf);
16864 ret = GS_ALL_DONE;
16865 break;
16868 case OBJ_TYPE_REF:
16870 enum gimplify_status r0, r1;
16871 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16872 post_p, is_gimple_val, fb_rvalue);
16873 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16874 post_p, is_gimple_val, fb_rvalue);
16875 TREE_SIDE_EFFECTS (*expr_p) = 0;
16876 ret = MIN (r0, r1);
16878 break;
16880 case LABEL_DECL:
16881 /* We get here when taking the address of a label. We mark
16882 the label as "forced", meaning it can never be removed and
16883 it is a potential target for any computed goto. */
16884 FORCED_LABEL (*expr_p) = 1;
16885 ret = GS_ALL_DONE;
16886 break;
16888 case STATEMENT_LIST:
16889 ret = gimplify_statement_list (expr_p, pre_p);
16890 break;
16892 case WITH_SIZE_EXPR:
16894 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16895 post_p == &internal_post ? NULL : post_p,
16896 gimple_test_f, fallback);
16897 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16898 is_gimple_val, fb_rvalue);
16899 ret = GS_ALL_DONE;
16901 break;
16903 case VAR_DECL:
16904 case PARM_DECL:
16905 ret = gimplify_var_or_parm_decl (expr_p);
16906 break;
16908 case RESULT_DECL:
16909 /* When within an OMP context, notice uses of variables. */
16910 if (gimplify_omp_ctxp)
16911 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16912 ret = GS_ALL_DONE;
16913 break;
16915 case DEBUG_EXPR_DECL:
16916 gcc_unreachable ();
16918 case DEBUG_BEGIN_STMT:
16919 gimplify_seq_add_stmt (pre_p,
16920 gimple_build_debug_begin_stmt
16921 (TREE_BLOCK (*expr_p),
16922 EXPR_LOCATION (*expr_p)));
16923 ret = GS_ALL_DONE;
16924 *expr_p = NULL;
16925 break;
16927 case SSA_NAME:
16928 /* Allow callbacks into the gimplifier during optimization. */
16929 ret = GS_ALL_DONE;
16930 break;
16932 case OMP_PARALLEL:
16933 gimplify_omp_parallel (expr_p, pre_p);
16934 ret = GS_ALL_DONE;
16935 break;
16937 case OMP_TASK:
16938 gimplify_omp_task (expr_p, pre_p);
16939 ret = GS_ALL_DONE;
16940 break;
16942 case OMP_SIMD:
16944 /* Temporarily disable into_ssa, as scan_omp_simd
16945 (which calls copy_gimple_seq_and_replace_locals) can't properly
16946 deal with SSA_NAMEs defined outside of the body. */
16947 bool saved_into_ssa = gimplify_ctxp->into_ssa;
16948 gimplify_ctxp->into_ssa = false;
16949 ret = gimplify_omp_for (expr_p, pre_p);
16950 gimplify_ctxp->into_ssa = saved_into_ssa;
16951 break;
16954 case OMP_FOR:
16955 case OMP_DISTRIBUTE:
16956 case OMP_TASKLOOP:
16957 case OACC_LOOP:
16958 ret = gimplify_omp_for (expr_p, pre_p);
16959 break;
16961 case OMP_LOOP:
16962 ret = gimplify_omp_loop (expr_p, pre_p);
16963 break;
16965 case OACC_CACHE:
16966 gimplify_oacc_cache (expr_p, pre_p);
16967 ret = GS_ALL_DONE;
16968 break;
16970 case OACC_DECLARE:
16971 gimplify_oacc_declare (expr_p, pre_p);
16972 ret = GS_ALL_DONE;
16973 break;
16975 case OACC_HOST_DATA:
16976 case OACC_DATA:
16977 case OACC_KERNELS:
16978 case OACC_PARALLEL:
16979 case OACC_SERIAL:
16980 case OMP_SCOPE:
16981 case OMP_SECTIONS:
16982 case OMP_SINGLE:
16983 case OMP_TARGET:
16984 case OMP_TARGET_DATA:
16985 case OMP_TEAMS:
16986 gimplify_omp_workshare (expr_p, pre_p);
16987 ret = GS_ALL_DONE;
16988 break;
16990 case OACC_ENTER_DATA:
16991 case OACC_EXIT_DATA:
16992 case OACC_UPDATE:
16993 case OMP_TARGET_UPDATE:
16994 case OMP_TARGET_ENTER_DATA:
16995 case OMP_TARGET_EXIT_DATA:
16996 gimplify_omp_target_update (expr_p, pre_p);
16997 ret = GS_ALL_DONE;
16998 break;
17000 case OMP_SECTION:
17001 case OMP_MASTER:
17002 case OMP_MASKED:
17003 case OMP_ORDERED:
17004 case OMP_CRITICAL:
17005 case OMP_SCAN:
17007 gimple_seq body = NULL;
17008 gimple *g;
17009 bool saved_in_omp_construct = in_omp_construct;
17011 in_omp_construct = true;
17012 gimplify_and_add (OMP_BODY (*expr_p), &body);
17013 in_omp_construct = saved_in_omp_construct;
17014 switch (TREE_CODE (*expr_p))
17016 case OMP_SECTION:
17017 g = gimple_build_omp_section (body);
17018 break;
17019 case OMP_MASTER:
17020 g = gimple_build_omp_master (body);
17021 break;
17022 case OMP_ORDERED:
17023 g = gimplify_omp_ordered (*expr_p, body);
17024 if (OMP_BODY (*expr_p) == NULL_TREE
17025 && gimple_code (g) == GIMPLE_OMP_ORDERED)
17026 gimple_omp_ordered_standalone (g);
17027 break;
17028 case OMP_MASKED:
17029 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
17030 pre_p, ORT_WORKSHARE, OMP_MASKED);
17031 gimplify_adjust_omp_clauses (pre_p, body,
17032 &OMP_MASKED_CLAUSES (*expr_p),
17033 OMP_MASKED);
17034 g = gimple_build_omp_masked (body,
17035 OMP_MASKED_CLAUSES (*expr_p));
17036 break;
17037 case OMP_CRITICAL:
17038 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
17039 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
17040 gimplify_adjust_omp_clauses (pre_p, body,
17041 &OMP_CRITICAL_CLAUSES (*expr_p),
17042 OMP_CRITICAL);
17043 g = gimple_build_omp_critical (body,
17044 OMP_CRITICAL_NAME (*expr_p),
17045 OMP_CRITICAL_CLAUSES (*expr_p));
17046 break;
17047 case OMP_SCAN:
17048 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
17049 pre_p, ORT_WORKSHARE, OMP_SCAN);
17050 gimplify_adjust_omp_clauses (pre_p, body,
17051 &OMP_SCAN_CLAUSES (*expr_p),
17052 OMP_SCAN);
17053 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
17054 break;
17055 default:
17056 gcc_unreachable ();
17058 gimplify_seq_add_stmt (pre_p, g);
17059 ret = GS_ALL_DONE;
17060 break;
17063 case OMP_TASKGROUP:
17065 gimple_seq body = NULL;
17067 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
17068 bool saved_in_omp_construct = in_omp_construct;
17069 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
17070 OMP_TASKGROUP);
17071 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
17073 in_omp_construct = true;
17074 gimplify_and_add (OMP_BODY (*expr_p), &body);
17075 in_omp_construct = saved_in_omp_construct;
17076 gimple_seq cleanup = NULL;
17077 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
17078 gimple *g = gimple_build_call (fn, 0);
17079 gimple_seq_add_stmt (&cleanup, g);
17080 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
17081 body = NULL;
17082 gimple_seq_add_stmt (&body, g);
17083 g = gimple_build_omp_taskgroup (body, *pclauses);
17084 gimplify_seq_add_stmt (pre_p, g);
17085 ret = GS_ALL_DONE;
17086 break;
17089 case OMP_ATOMIC:
17090 case OMP_ATOMIC_READ:
17091 case OMP_ATOMIC_CAPTURE_OLD:
17092 case OMP_ATOMIC_CAPTURE_NEW:
17093 ret = gimplify_omp_atomic (expr_p, pre_p);
17094 break;
17096 case TRANSACTION_EXPR:
17097 ret = gimplify_transaction (expr_p, pre_p);
17098 break;
17100 case TRUTH_AND_EXPR:
17101 case TRUTH_OR_EXPR:
17102 case TRUTH_XOR_EXPR:
17104 tree orig_type = TREE_TYPE (*expr_p);
17105 tree new_type, xop0, xop1;
17106 *expr_p = gimple_boolify (*expr_p);
17107 new_type = TREE_TYPE (*expr_p);
17108 if (!useless_type_conversion_p (orig_type, new_type))
17110 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17111 ret = GS_OK;
17112 break;
17115 /* Boolified binary truth expressions are semantically equivalent
17116 to bitwise binary expressions. Canonicalize them to the
17117 bitwise variant. */
17118 switch (TREE_CODE (*expr_p))
17120 case TRUTH_AND_EXPR:
17121 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17122 break;
17123 case TRUTH_OR_EXPR:
17124 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17125 break;
17126 case TRUTH_XOR_EXPR:
17127 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17128 break;
17129 default:
17130 break;
17132 /* Now make sure that operands have compatible type to
17133 expression's new_type. */
17134 xop0 = TREE_OPERAND (*expr_p, 0);
17135 xop1 = TREE_OPERAND (*expr_p, 1);
17136 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17137 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17138 new_type,
17139 xop0);
17140 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17141 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17142 new_type,
17143 xop1);
17144 /* Continue classified as tcc_binary. */
17145 goto expr_2;
17148 case VEC_COND_EXPR:
17149 goto expr_3;
17151 case VEC_PERM_EXPR:
17152 /* Classified as tcc_expression. */
17153 goto expr_3;
17155 case BIT_INSERT_EXPR:
17156 /* Argument 3 is a constant. */
17157 goto expr_2;
17159 case POINTER_PLUS_EXPR:
17161 enum gimplify_status r0, r1;
17162 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17163 post_p, is_gimple_val, fb_rvalue);
17164 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17165 post_p, is_gimple_val, fb_rvalue);
17166 recalculate_side_effects (*expr_p);
17167 ret = MIN (r0, r1);
17168 break;
17171 default:
17172 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17174 case tcc_comparison:
17175 /* Handle comparison of objects of non-scalar-mode aggregates
17176 with a call to memcmp. It would be nice to only have to do
17177 this for variable-sized objects, but then we'd have to allow
17178 the same nest of reference nodes we allow for MODIFY_EXPR and
17179 that's too complex.
17181 Compare scalar mode aggregates as scalar mode values. Using
17182 memcmp for them would be very inefficient at best, and is
17183 plain wrong if bitfields are involved. */
17184 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17185 ret = GS_ERROR;
17186 else
17188 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17190 /* Vector comparisons need no boolification. */
17191 if (TREE_CODE (type) == VECTOR_TYPE)
17192 goto expr_2;
17193 else if (!AGGREGATE_TYPE_P (type))
17195 tree org_type = TREE_TYPE (*expr_p);
17196 *expr_p = gimple_boolify (*expr_p);
17197 if (!useless_type_conversion_p (org_type,
17198 TREE_TYPE (*expr_p)))
17200 *expr_p = fold_convert_loc (input_location,
17201 org_type, *expr_p);
17202 ret = GS_OK;
17204 else
17205 goto expr_2;
17207 else if (TYPE_MODE (type) != BLKmode)
17208 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17209 else
17210 ret = gimplify_variable_sized_compare (expr_p);
17212 break;
17214 /* If *EXPR_P does not need to be special-cased, handle it
17215 according to its class. */
17216 case tcc_unary:
17217 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17218 post_p, is_gimple_val, fb_rvalue);
17219 break;
17221 case tcc_binary:
17222 expr_2:
17224 enum gimplify_status r0, r1;
17226 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17227 post_p, is_gimple_val, fb_rvalue);
17228 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17229 post_p, is_gimple_val, fb_rvalue);
17231 ret = MIN (r0, r1);
17232 break;
17235 expr_3:
17237 enum gimplify_status r0, r1, r2;
17239 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17240 post_p, is_gimple_val, fb_rvalue);
17241 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17242 post_p, is_gimple_val, fb_rvalue);
17243 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17244 post_p, is_gimple_val, fb_rvalue);
17246 ret = MIN (MIN (r0, r1), r2);
17247 break;
17250 case tcc_declaration:
17251 case tcc_constant:
17252 ret = GS_ALL_DONE;
17253 goto dont_recalculate;
17255 default:
17256 gcc_unreachable ();
17259 recalculate_side_effects (*expr_p);
17261 dont_recalculate:
17262 break;
17265 gcc_assert (*expr_p || ret != GS_OK);
17267 while (ret == GS_OK);
17269 /* If we encountered an error_mark somewhere nested inside, either
17270 stub out the statement or propagate the error back out. */
17271 if (ret == GS_ERROR)
17273 if (is_statement)
17274 *expr_p = NULL;
17275 goto out;
17278 /* This was only valid as a return value from the langhook, which
17279 we handled. Make sure it doesn't escape from any other context. */
17280 gcc_assert (ret != GS_UNHANDLED);
17282 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17284 /* We aren't looking for a value, and we don't have a valid
17285 statement. If it doesn't have side-effects, throw it away.
17286 We can also get here with code such as "*&&L;", where L is
17287 a LABEL_DECL that is marked as FORCED_LABEL. */
17288 if (TREE_CODE (*expr_p) == LABEL_DECL
17289 || !TREE_SIDE_EFFECTS (*expr_p))
17290 *expr_p = NULL;
17291 else if (!TREE_THIS_VOLATILE (*expr_p))
17293 /* This is probably a _REF that contains something nested that
17294 has side effects. Recurse through the operands to find it. */
17295 enum tree_code code = TREE_CODE (*expr_p);
17297 switch (code)
17299 case COMPONENT_REF:
17300 case REALPART_EXPR:
17301 case IMAGPART_EXPR:
17302 case VIEW_CONVERT_EXPR:
17303 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17304 gimple_test_f, fallback);
17305 break;
17307 case ARRAY_REF:
17308 case ARRAY_RANGE_REF:
17309 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17310 gimple_test_f, fallback);
17311 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17312 gimple_test_f, fallback);
17313 break;
17315 default:
17316 /* Anything else with side-effects must be converted to
17317 a valid statement before we get here. */
17318 gcc_unreachable ();
17321 *expr_p = NULL;
17323 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17324 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17325 && !is_empty_type (TREE_TYPE (*expr_p)))
17327 /* Historically, the compiler has treated a bare reference
17328 to a non-BLKmode volatile lvalue as forcing a load. */
17329 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17331 /* Normally, we do not want to create a temporary for a
17332 TREE_ADDRESSABLE type because such a type should not be
17333 copied by bitwise-assignment. However, we make an
17334 exception here, as all we are doing is ensuring that
17335 we read the bytes that make up the type. We use
17336 create_tmp_var_raw because create_tmp_var will abort when
17337 given a TREE_ADDRESSABLE type. */
17338 tree tmp = create_tmp_var_raw (type, "vol");
17339 gimple_add_tmp_var (tmp);
17340 gimplify_assign (tmp, *expr_p, pre_p);
17341 *expr_p = NULL;
17343 else
17344 /* We can't do anything useful with a volatile reference to
17345 an incomplete type, so just throw it away. Likewise for
17346 a BLKmode type, since any implicit inner load should
17347 already have been turned into an explicit one by the
17348 gimplification process. */
17349 *expr_p = NULL;
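/* In short: a bare volatile reference is kept (as a load into the
"vol" temporary above) only when its type is complete, non-BLKmode
and non-empty; in every other case it is dropped. */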
17352 /* If we are gimplifying at the statement level, we're done. Tack
17353 everything together and return. */
17354 if (fallback == fb_none || is_statement)
17356 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17357 it out for GC to reclaim it. */
17358 *expr_p = NULL_TREE;
17360 if (!gimple_seq_empty_p (internal_pre)
17361 || !gimple_seq_empty_p (internal_post))
17363 gimplify_seq_add_seq (&internal_pre, internal_post);
17364 gimplify_seq_add_seq (pre_p, internal_pre);
17367 /* The result of gimplifying *EXPR_P is going to be the last few
17368 statements in *PRE_P and *POST_P. Add location information
17369 to all the statements that were added by the gimplification
17370 helpers. */
17371 if (!gimple_seq_empty_p (*pre_p))
17372 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17374 if (!gimple_seq_empty_p (*post_p))
17375 annotate_all_with_location_after (*post_p, post_last_gsi,
17376 input_location);
17378 goto out;
17381 #ifdef ENABLE_GIMPLE_CHECKING
17382 if (*expr_p)
17384 enum tree_code code = TREE_CODE (*expr_p);
17385 /* These expressions should already be in gimple IR form. */
17386 gcc_assert (code != MODIFY_EXPR
17387 && code != ASM_EXPR
17388 && code != BIND_EXPR
17389 && code != CATCH_EXPR
17390 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17391 && code != EH_FILTER_EXPR
17392 && code != GOTO_EXPR
17393 && code != LABEL_EXPR
17394 && code != LOOP_EXPR
17395 && code != SWITCH_EXPR
17396 && code != TRY_FINALLY_EXPR
17397 && code != EH_ELSE_EXPR
17398 && code != OACC_PARALLEL
17399 && code != OACC_KERNELS
17400 && code != OACC_SERIAL
17401 && code != OACC_DATA
17402 && code != OACC_HOST_DATA
17403 && code != OACC_DECLARE
17404 && code != OACC_UPDATE
17405 && code != OACC_ENTER_DATA
17406 && code != OACC_EXIT_DATA
17407 && code != OACC_CACHE
17408 && code != OMP_CRITICAL
17409 && code != OMP_FOR
17410 && code != OACC_LOOP
17411 && code != OMP_MASTER
17412 && code != OMP_MASKED
17413 && code != OMP_TASKGROUP
17414 && code != OMP_ORDERED
17415 && code != OMP_PARALLEL
17416 && code != OMP_SCAN
17417 && code != OMP_SECTIONS
17418 && code != OMP_SECTION
17419 && code != OMP_SINGLE
17420 && code != OMP_SCOPE);
17422 #endif
17424 /* Otherwise we're gimplifying a subexpression, so the resulting
17425 value is interesting. If it's a valid operand that matches
17426 GIMPLE_TEST_F, we're done. Unless we are handling some
17427 post-effects internally; if that's the case, we need to copy into
17428 a temporary before adding the post-effects to POST_P. */
17429 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17430 goto out;
17432 /* Otherwise, we need to create a new temporary for the gimplified
17433 expression. */
17435 /* We can't return an lvalue if we have an internal postqueue. The
17436 object the lvalue refers to would (probably) be modified by the
17437 postqueue; we need to copy the value out first, which means an
17438 rvalue. */
17439 if ((fallback & fb_lvalue)
17440 && gimple_seq_empty_p (internal_post)
17441 && is_gimple_addressable (*expr_p))
17443 /* An lvalue will do. Take the address of the expression, store it
17444 in a temporary, and replace the expression with a MEM_REF of
17445 that temporary. */
17446 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17447 unsigned int ref_align = get_object_alignment (*expr_p);
17448 tree ref_type = TREE_TYPE (*expr_p);
17449 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17450 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17451 if (TYPE_ALIGN (ref_type) != ref_align)
17452 ref_type = build_aligned_type (ref_type, ref_align);
17453 *expr_p = build2 (MEM_REF, ref_type,
17454 tmp, build_zero_cst (ref_alias_type));
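/* I.e. EXPR has been replaced by "*TMP" where TMP holds &EXPR; the
alias pointer type and the re-aligned type preserve the original
reference's TBAA and alignment information. */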
17456 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17458 /* An rvalue will do. Assign the gimplified expression into a
17459 new temporary TMP and replace the original expression with
17460 TMP. First, make sure that the expression has a type so that
17461 it can be assigned into a temporary. */
17462 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17463 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17465 else
17467 #ifdef ENABLE_GIMPLE_CHECKING
17468 if (!(fallback & fb_mayfail))
17470 fprintf (stderr, "gimplification failed:\n");
17471 print_generic_expr (stderr, *expr_p);
17472 debug_tree (*expr_p);
17473 internal_error ("gimplification failed");
17475 #endif
17476 gcc_assert (fallback & fb_mayfail);
17478 /* If this is an asm statement, and the user asked for the
17479 impossible, don't die. Fail and let gimplify_asm_expr
17480 issue an error. */
17481 ret = GS_ERROR;
17482 goto out;
17485 /* Make sure the temporary matches our predicate. */
17486 gcc_assert ((*gimple_test_f) (*expr_p));
17488 if (!gimple_seq_empty_p (internal_post))
17490 annotate_all_with_location (internal_post, input_location);
17491 gimplify_seq_add_seq (pre_p, internal_post);
17494 out:
17495 input_location = saved_location;
17496 return ret;
17499 /* Like gimplify_expr but make sure the gimplified result is not itself
17500 an SSA name (a decl is used instead where one would appear). Temporaries
17501 required by evaluating *EXPR_P may still be SSA names. */
17503 static enum gimplify_status
17504 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17505 bool (*gimple_test_f) (tree), fallback_t fallback,
17506 bool allow_ssa)
17508 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17509 gimple_test_f, fallback);
17510 if (! allow_ssa
17511 && TREE_CODE (*expr_p) == SSA_NAME)
17512 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17513 return ret;
17516 /* Look through TYPE for variable-sized objects and gimplify each such
17517 size that we find. Add to LIST_P any statements generated. */
17519 void
17520 gimplify_type_sizes (tree type, gimple_seq *list_p)
17522 if (type == NULL || type == error_mark_node)
17523 return;
17525 const bool ignored_p
17526 = TYPE_NAME (type)
17527 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17528 && DECL_IGNORED_P (TYPE_NAME (type));
17529 tree t;
17531 /* We first do the main variant, then copy into any other variants. */
17532 type = TYPE_MAIN_VARIANT (type);
17534 /* Avoid infinite recursion. */
17535 if (TYPE_SIZES_GIMPLIFIED (type))
17536 return;
17538 TYPE_SIZES_GIMPLIFIED (type) = 1;
17540 switch (TREE_CODE (type))
17542 case INTEGER_TYPE:
17543 case ENUMERAL_TYPE:
17544 case BOOLEAN_TYPE:
17545 case REAL_TYPE:
17546 case FIXED_POINT_TYPE:
17547 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17548 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17550 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17552 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17553 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17555 break;
17557 case ARRAY_TYPE:
17558 /* These types may not have declarations, so handle them here. */
17559 gimplify_type_sizes (TREE_TYPE (type), list_p);
17560 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17561 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
17562 with assigned stack slots, and for -O1+ -g they should be tracked
17563 by VTA (variable tracking at assignments). */
17564 if (!ignored_p
17565 && TYPE_DOMAIN (type)
17566 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17568 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17569 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17570 DECL_IGNORED_P (t) = 0;
17571 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17572 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17573 DECL_IGNORED_P (t) = 0;
17575 break;
17577 case RECORD_TYPE:
17578 case UNION_TYPE:
17579 case QUAL_UNION_TYPE:
17580 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17581 if (TREE_CODE (field) == FIELD_DECL)
17583 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17584 /* Likewise, ensure variable offsets aren't removed. */
17585 if (!ignored_p
17586 && (t = DECL_FIELD_OFFSET (field))
17587 && VAR_P (t)
17588 && DECL_ARTIFICIAL (t))
17589 DECL_IGNORED_P (t) = 0;
17590 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17591 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17592 gimplify_type_sizes (TREE_TYPE (field), list_p);
17594 break;
17596 case POINTER_TYPE:
17597 case REFERENCE_TYPE:
17598 /* We used to recurse on the pointed-to type here, which turned out to
17599 be incorrect because its definition might refer to variables not
17600 yet initialized at this point if a forward declaration is involved.
17602 It was actually useful for anonymous pointed-to types to ensure
17603 that the sizes evaluation dominates every possible later use of the
17604 values. Restricting to such types here would be safe since there
17605 is no possible forward declaration around, but would introduce an
17606 undesirable middle-end semantic to anonymity. We then defer to
17607 front-ends the responsibility of ensuring that the sizes are
17608 evaluated both early and late enough, e.g. by attaching artificial
17609 type declarations to the tree. */
17610 break;
17612 default:
17613 break;
17616 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17617 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17619 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17621 TYPE_SIZE (t) = TYPE_SIZE (type);
17622 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17623 TYPE_SIZES_GIMPLIFIED (t) = 1;
17627 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17628 a size or position, has had all of its SAVE_EXPRs evaluated.
17629 We add any required statements to *STMT_P. */
17631 void
17632 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17634 tree expr = *expr_p;
17636 /* We don't do anything if the value isn't there, is constant, or contains
17637 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17638 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17639 will want to replace it with a new variable, but that will cause problems
17640 if this type is from outside the function. It's OK to have that here. */
17641 if (expr == NULL_TREE
17642 || is_gimple_constant (expr)
17643 || VAR_P (expr)
17644 || CONTAINS_PLACEHOLDER_P (expr))
17645 return;
17647 *expr_p = unshare_expr (expr);
17649 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17650 if the def vanishes. */
17651 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17653 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17654 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
17655 via gimplify_vla_decl even when all of their sizes are INTEGER_CSTs. */
17656 if (is_gimple_constant (*expr_p))
17657 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17660 /* Gimplify the statement body of FNDECL and return a GIMPLE_BIND node
17661 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17662 is true, also gimplify the parameters. */
17664 gbind *
17665 gimplify_body (tree fndecl, bool do_parms)
17667 location_t saved_location = input_location;
17668 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17669 gimple *outer_stmt;
17670 gbind *outer_bind;
17672 timevar_push (TV_TREE_GIMPLIFY);
17674 init_tree_ssa (cfun);
17676 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17677 gimplification. */
17678 default_rtl_profile ();
17680 gcc_assert (gimplify_ctxp == NULL);
17681 push_gimplify_context (true);
17683 if (flag_openacc || flag_openmp)
17685 gcc_assert (gimplify_omp_ctxp == NULL);
17686 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17687 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17690 /* Unshare most shared trees in the body and in that of any nested functions.
17691 It would seem we don't have to do this for nested functions because
17692 they are supposed to be output and then the outer function gimplified
17693 first, but the g++ front end doesn't always do it that way. */
17694 unshare_body (fndecl);
17695 unvisit_body (fndecl);
17697 /* Make sure input_location isn't set to something weird. */
17698 input_location = DECL_SOURCE_LOCATION (fndecl);
17700 /* Resolve callee-copies. This has to be done before processing
17701 the body so that DECL_VALUE_EXPR gets processed correctly. */
17702 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17704 /* Gimplify the function's body. */
17705 seq = NULL;
17706 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17707 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17708 if (!outer_stmt)
17710 outer_stmt = gimple_build_nop ();
17711 gimplify_seq_add_stmt (&seq, outer_stmt);
17714 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17715 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17716 if (gimple_code (outer_stmt) == GIMPLE_BIND
17717 && (gimple_seq_first_nondebug_stmt (seq)
17718 == gimple_seq_last_nondebug_stmt (seq)))
17720 outer_bind = as_a <gbind *> (outer_stmt);
17721 if (gimple_seq_first_stmt (seq) != outer_stmt
17722 || gimple_seq_last_stmt (seq) != outer_stmt)
17724 /* If there are debug stmts before or after outer_stmt, move them
17725 inside of outer_bind body. */
17726 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17727 gimple_seq second_seq = NULL;
17728 if (gimple_seq_first_stmt (seq) != outer_stmt
17729 && gimple_seq_last_stmt (seq) != outer_stmt)
17731 second_seq = gsi_split_seq_after (gsi);
17732 gsi_remove (&gsi, false);
17734 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17735 gsi_remove (&gsi, false);
17736 else
17738 gsi_remove (&gsi, false);
17739 second_seq = seq;
17740 seq = NULL;
17742 gimple_seq_add_seq_without_update (&seq,
17743 gimple_bind_body (outer_bind));
17744 gimple_seq_add_seq_without_update (&seq, second_seq);
17745 gimple_bind_set_body (outer_bind, seq);
17748 else
17749 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17751 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17753 /* If we had callee-copies statements, insert them at the beginning
17754 of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters. */
17755 if (!gimple_seq_empty_p (parm_stmts))
17757 tree parm;
17759 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17760 if (parm_cleanup)
17762 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17763 GIMPLE_TRY_FINALLY);
17764 parm_stmts = NULL;
17765 gimple_seq_add_stmt (&parm_stmts, g);
17767 gimple_bind_set_body (outer_bind, parm_stmts);
17769 for (parm = DECL_ARGUMENTS (current_function_decl);
17770 parm; parm = DECL_CHAIN (parm))
17771 if (DECL_HAS_VALUE_EXPR_P (parm))
17773 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17774 DECL_IGNORED_P (parm) = 0;
17778 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17779 && gimplify_omp_ctxp)
17781 delete_omp_context (gimplify_omp_ctxp);
17782 gimplify_omp_ctxp = NULL;
17785 pop_gimplify_context (outer_bind);
17786 gcc_assert (gimplify_ctxp == NULL);
17788 if (flag_checking && !seen_error ())
17789 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17791 timevar_pop (TV_TREE_GIMPLIFY);
17792 input_location = saved_location;
17794 return outer_bind;
17797 typedef char *char_p; /* For DEF_VEC_P. */
17799 /* Return whether we should exclude FNDECL from instrumentation. */
17801 static bool
17802 flag_instrument_functions_exclude_p (tree fndecl)
17804 vec<char_p> *v;
17806 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17807 if (v && v->length () > 0)
17809 const char *name;
17810 int i;
17811 char *s;
17813 name = lang_hooks.decl_printable_name (fndecl, 1);
17814 FOR_EACH_VEC_ELT (*v, i, s)
17815 if (strstr (name, s) != NULL)
17816 return true;
17819 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17820 if (v && v->length () > 0)
17822 const char *name;
17823 int i;
17824 char *s;
17826 name = DECL_SOURCE_FILE (fndecl);
17827 FOR_EACH_VEC_ELT (*v, i, s)
17828 if (strstr (name, s) != NULL)
17829 return true;
17832 return false;
17835 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17836 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17837 the instrumentation function. If STMT is not NULL, it is a statement
17838 to be executed just before the call to the instrumentation function. */
17840 static void
17841 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17842 tree cond_var, gimple *stmt)
17844 /* The instrumentation hooks aren't going to call the instrumented
17845 function and the address they receive is expected to be matchable
17846 against symbol addresses. Make sure we don't create a trampoline,
17847 in case the current function is nested. */
17848 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17849 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17851 tree label_true, label_false;
17852 if (cond_var)
17854 label_true = create_artificial_label (UNKNOWN_LOCATION);
17855 label_false = create_artificial_label (UNKNOWN_LOCATION);
17856 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17857 label_true, label_false);
17858 gimplify_seq_add_stmt (seq, cond);
17859 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17860 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17861 NOT_TAKEN));
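/* The guarded shape is thus "if (cond_var == false) goto label_true;
else goto label_false;" with the instrumentation call emitted under
label_true (predicted cold) and label_false placed after the call,
below. */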
17864 if (stmt)
17865 gimplify_seq_add_stmt (seq, stmt);
17867 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17868 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17869 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17870 gimple_call_set_lhs (call, tmp_var);
17871 gimplify_seq_add_stmt (seq, call);
17872 x = builtin_decl_implicit (fncode);
17873 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17874 gimplify_seq_add_stmt (seq, call);
17876 if (cond_var)
17877 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
17880 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17881 node for the function we want to gimplify.
17883 The resulting sequence of GIMPLE statements is stored as the GIMPLE
17884 body of FNDECL. */
17886 void
17887 gimplify_function_tree (tree fndecl)
17889 gimple_seq seq;
17890 gbind *bind;
17892 gcc_assert (!gimple_body (fndecl));
17894 if (DECL_STRUCT_FUNCTION (fndecl))
17895 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17896 else
17897 push_struct_function (fndecl);
17899 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17900 if necessary. */
17901 cfun->curr_properties |= PROP_gimple_lva;
17903 if (asan_sanitize_use_after_scope ())
17904 asan_poisoned_variables = new hash_set<tree> ();
17905 bind = gimplify_body (fndecl, true);
17906 if (asan_poisoned_variables)
17908 delete asan_poisoned_variables;
17909 asan_poisoned_variables = NULL;
17912 /* The tree body of the function is no longer needed, replace it
17913 with the new GIMPLE body. */
17914 seq = NULL;
17915 gimple_seq_add_stmt (&seq, bind);
17916 gimple_set_body (fndecl, seq);
17918 /* If we're instrumenting function entry/exit, then prepend the call to
17919 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17920 catch the exit hook. */
17921 /* ??? Add some way to ignore exceptions for this TFE. */
17922 if (flag_instrument_function_entry_exit
17923 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
17924 /* Do not instrument extern inline functions. */
17925 && !(DECL_DECLARED_INLINE_P (fndecl)
17926 && DECL_EXTERNAL (fndecl)
17927 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
17928 && !flag_instrument_functions_exclude_p (fndecl))
17930 gimple_seq body = NULL, cleanup = NULL;
17931 gassign *assign;
17932 tree cond_var;
17934 /* If -finstrument-functions-once is specified, generate:
17936 static volatile bool C.0 = false;
17937 bool tmp_called;
17939 tmp_called = C.0;
17940 if (!tmp_called)
17942 C.0 = true;
17943 [call profiling enter function]
17946 without specific protection for data races. */
17947 if (flag_instrument_function_entry_exit > 1)
17949 tree first_var
17950 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
17951 VAR_DECL,
17952 create_tmp_var_name ("C"),
17953 boolean_type_node);
17954 DECL_ARTIFICIAL (first_var) = 1;
17955 DECL_IGNORED_P (first_var) = 1;
17956 TREE_STATIC (first_var) = 1;
17957 TREE_THIS_VOLATILE (first_var) = 1;
17958 TREE_USED (first_var) = 1;
17959 DECL_INITIAL (first_var) = boolean_false_node;
17960 varpool_node::add (first_var);
17962 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
17963 assign = gimple_build_assign (cond_var, first_var);
17964 gimplify_seq_add_stmt (&body, assign);
17966 assign = gimple_build_assign (first_var, boolean_true_node);
17969 else
17971 cond_var = NULL_TREE;
17972 assign = NULL;
17975 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
17976 cond_var, assign);
17978 /* If -finstrument-functions-once is specified, generate:
17980 if (!tmp_called)
17981 [call profiling exit function]
17983 without specific protection for data races. */
17984 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
17985 cond_var, NULL);
17987 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
17988 gimplify_seq_add_stmt (&body, tf);
17989 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
17991 /* Replace the current function body with the body
17992 wrapped in the try/finally TF. */
17993 seq = NULL;
17994 gimple_seq_add_stmt (&seq, new_bind);
17995 gimple_set_body (fndecl, seq);
17996 bind = new_bind;
17999 if (sanitize_flags_p (SANITIZE_THREAD)
18000 && param_tsan_instrument_func_entry_exit)
18002 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
18003 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
18004 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
18005 /* Replace the current function body with the body
18006 wrapped in the try/finally TF. */
18007 seq = NULL;
18008 gimple_seq_add_stmt (&seq, new_bind);
18009 gimple_set_body (fndecl, seq);
18012 DECL_SAVED_TREE (fndecl) = NULL_TREE;
18013 cfun->curr_properties |= PROP_gimple_any;
18015 pop_cfun ();
18017 dump_function (TDI_gimple, fndecl);
18020 /* Return a dummy expression of type TYPE in order to keep going after an
18021 error. */
18023 static tree
18024 dummy_object (tree type)
18026 tree t = build_int_cst (build_pointer_type (type), 0);
18027 return build2 (MEM_REF, type, t, t);
18030 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
18031 builtin function, but a very special sort of operator. */
18033 enum gimplify_status
18034 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
18035 gimple_seq *post_p ATTRIBUTE_UNUSED)
18037 tree promoted_type, have_va_type;
18038 tree valist = TREE_OPERAND (*expr_p, 0);
18039 tree type = TREE_TYPE (*expr_p);
18040 tree t, tag, aptag;
18041 location_t loc = EXPR_LOCATION (*expr_p);
18043 /* Verify that valist is of the proper type. */
18044 have_va_type = TREE_TYPE (valist);
18045 if (have_va_type == error_mark_node)
18046 return GS_ERROR;
18047 have_va_type = targetm.canonical_va_list_type (have_va_type);
18048 if (have_va_type == NULL_TREE
18049 && POINTER_TYPE_P (TREE_TYPE (valist)))
18050 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
18051 have_va_type
18052 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
18053 gcc_assert (have_va_type != NULL_TREE);
18055 /* Generate a diagnostic for requesting data of a type that cannot
18056 be passed through `...' due to type promotion at the call site. */
18057 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
18058 != type)
18060 static bool gave_help;
18061 bool warned;
18062 /* Use the expansion point to handle cases such as passing bool (defined
18063 in a system header) through `...'. */
18064 location_t xloc
18065 = expansion_point_location_if_in_system_header (loc);
18067 /* Unfortunately, this is merely undefined, rather than a constraint
18068 violation, so we cannot make this an error. If this call is never
18069 executed, the program is still strictly conforming. */
18070 auto_diagnostic_group d;
18071 warned = warning_at (xloc, 0,
18072 "%qT is promoted to %qT when passed through %<...%>",
18073 type, promoted_type);
18074 if (!gave_help && warned)
18076 gave_help = true;
18077 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
18078 promoted_type, type);
18081 /* We can, however, treat "undefined" any way we please.
18082 Emit a trap to encourage the user to fix the program. */
18083 if (warned)
18084 inform (xloc, "if this code is reached, the program will abort");
18085 /* Before the abort, allow the evaluation of the va_list
18086 expression to exit or longjmp. */
18087 gimplify_and_add (valist, pre_p);
18088 t = build_call_expr_loc (loc,
18089 builtin_decl_implicit (BUILT_IN_TRAP), 0);
18090 gimplify_and_add (t, pre_p);
18092 /* This is dead code, but go ahead and finish so that the
18093 mode of the result comes out right. */
18094 *expr_p = dummy_object (type);
18095 return GS_ALL_DONE;
18098 tag = build_int_cst (build_pointer_type (type), 0);
18099 aptag = build_int_cst (TREE_TYPE (valist), 0);
18101 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
18102 valist, tag, aptag);
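/* TAG and APTAG are zero constants whose types carry, respectively, a
pointer to the requested type and the type of the va_list; they exist
purely to convey type information to the later lowering of
IFN_VA_ARG. */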
18104 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18105 needs to be expanded. */
18106 cfun->curr_properties &= ~PROP_gimple_lva;
18108 return GS_OK;
18111 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18113 DST/SRC are the destination and source respectively. You can pass
18114 ungimplified trees in DST or SRC, in which case they will be
18115 converted to a gimple operand if necessary.
18117 This function returns the newly created GIMPLE_ASSIGN tuple. */
18119 gimple *
18120 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18122 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18123 gimplify_and_add (t, seq_p);
18124 ggc_free (t);
18125 return gimple_seq_last_stmt (*seq_p);
18128 inline hashval_t
18129 gimplify_hasher::hash (const elt_t *p)
18131 tree t = p->val;
18132 return iterative_hash_expr (t, 0);
18135 inline bool
18136 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18138 tree t1 = p1->val;
18139 tree t2 = p2->val;
18140 enum tree_code code = TREE_CODE (t1);
18142 if (TREE_CODE (t2) != code
18143 || TREE_TYPE (t1) != TREE_TYPE (t2))
18144 return false;
18146 if (!operand_equal_p (t1, t2, 0))
18147 return false;
18149 /* Only allow them to compare equal if they also hash equal; otherwise
18150 results are nondeterministic, and we fail bootstrap comparison. */
18151 gcc_checking_assert (hash (p1) == hash (p2));
18153 return true;