/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "diagnostic.h"		/* For errorcount.  */
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Identifier for a basic condition, mapping it to other basic conditions of
   its Boolean expression.  Basic conditions given the same uid (in the same
   function) are parts of the same ANDIF/ORIF expression.  Used for condition
   coverage.  */
static unsigned nextuid = 1;
/* Get a fresh identifier for a new condition expression.  This is used for
   condition coverage.  */
static unsigned
next_cond_uid ()
{
  return nextuid++;
}
/* Reset the condition uid to the value it should have when compiling a new
   function.  0 is already the default/untouched value, so start at non-zero.
   A valid and set id should always be > 0.  This is used for condition
   coverage.  */
static void
reset_cond_uid ()
{
  nextuid = 1;
}
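
/* Illustrative example: in 'if (a && b) ...; if (c) ...;' the basic
   conditions 'a' and 'b' belong to the same ANDIF expression and so
   share one uid, while the unrelated condition 'c' receives a fresh
   uid from next_cond_uid ().  */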

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an 'always,to' or 'always,tofrom'
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
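
/* Illustrative note: the low-order bits act as modifiers of the base
   region kind.  E.g. a combined construct such as
   '#pragma omp parallel for' is gimplified with ORT_COMBINED_PARALLEL
   (ORT_PARALLEL | 1), and a test like
   '(region_type & ORT_TARGET) != 0' matches plain, combined and
   OpenACC-offloaded target regions alike.  */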

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
  unsigned in_handler_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
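
/* Illustrative usage (not from this file): a caller gimplifying a
   function body typically brackets the work as

       push_gimplify_context (false, false);
       ... gimplify statements, creating temporaries ...
       pop_gimplify_context (outer_bind);

   where OUTER_BIND is the outermost GIMPLE_BIND just produced, so that
   the accumulated temporaries are declared in its variable list;
   passing NULL sends them to local_decls instead.  */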

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
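
/* Note: comparing DECL_UIDs rather than pointer values is what makes
   this ordering stable: uids are assigned deterministically, whereas
   tree addresses can vary from run to run.  */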

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
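
/* Illustrative example: when optimizing, two formal-temporary requests
   for the same side-effect-free value, say 'a + b', hash to the same
   elt_t above and thus return one shared temporary instead of creating
   a second variable for the second use.  */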

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we failed to gimplify VAL then we can end up with the temporary
     SSA name not having a definition.  In this case return a decl.  */
  if (TREE_CODE (t) == SSA_NAME && ! SSA_NAME_DEF_STMT (t))
    return lookup_tmp_var (val, is_formal, not_gimple_reg);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
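
/* Illustrative contrast of the two entry points above: a SAVE_EXPR
   temporary violates rule #1 (its underlying value may change between
   initialization and use) and an '&&' temporary violates rule #2 (it
   is assigned more than once), so both must use
   get_initialized_tmp_var; only values outside those two cases may
   share a formal temporary via get_formal_tmp_var.  */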

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
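
/* Illustrative example of the problem being solved: if a front end
   builds a single tree for 'p->x' and reuses that node in two
   statements, gimplifying the first statement rewrites the node's
   operands in place; without the copy made here, the second statement
   would then refer to a half-gimplified node and yield invalid
   GIMPLE.  */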

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
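
/* Illustrative example: for a GENERIC wrapper used for its value, such
   as 'x = BIND_EXPR <..., { ...; e; }>', voidify_wrapper_expr walks
   down to the final expression 'e', replaces it by 'retval = e' (or by
   the caller-supplied assignment TEMP), gives every wrapper on the way
   void type, and returns the temporary so the caller can refer to the
   value afterwards.  */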

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
   on the POISON flag, the shadow memory of a DECL variable.  The call
   is inserted at the position identified by the iterator IT; the
   BEFORE flag selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons a DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES, poisoning or
   unpoisoning depending on the POISON flag.  The created statements are
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
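
/* Design note on the sorting above: hash_set iteration order is not
   deterministic, so poisoning in DECL_UID order keeps the emitted
   IFN_ASAN_MARK sequence stable from run to run.  */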

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must
		 specify an allocator clause unless a requires directive with
		 the dynamic_allocators clause is present in the same
		 compilation unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and
		     type(C_funptr); note that normal proc pointers are
		     rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match
		     TYPE_SIZE_UNIT; hence, for some decls, a size variable
		     is saved in the attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node,
					   DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr)) is set
		     and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
		     is set, used in a condition much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not have properly
			 nested declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the
			 GOMP_alloc at the top, unless an allocator or size
			 expression requires it to be put afterward; note that
			 the size is always later in generated code; for
			 strings, no size expr but still an expr might be
			 available.  As LTO does not handle a statement list,
			 'sl' has to be removed; this is done by removing the
			 attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);

		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is
			 added here; for C/C++ it will be added in the
			 'cleanup' section after gimplification.  But Fortran
			 already has a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      ++e;
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 causes the GOMP_free call to be added above already; and
		 "omp allocate" is removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_STORAGE_END);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
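
/* Illustrative sketch (invented temporary names): for a block
   containing a VLA and an ordinary aggregate,

       { char buf[n]; struct S s; ... }

   the function above produces roughly

       saved_stack.1 = __builtin_stack_save ();
       try
	 {
	   ... alloca-based setup of 'buf' (see gimplify_vla_decl) ...
	 }
       finally
	 {
	   s = {CLOBBER(eos)};
	   __builtin_stack_restore (saved_stack.1);
	 }

   i.e. the cleanups (clobbers, stack restore) run on every exit from
   the block via the GIMPLE_TRY_FINALLY built above.  */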

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are inside a conditional context, this return is an early
     return; predict it as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
1817 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1818 GIMPLE value, it is assigned to a new temporary and the statement is
1819 re-written to return the temporary.
1821 PRE_P points to the sequence where side effects that must happen before
1822 STMT should be stored. */
1824 static enum gimplify_status
1825 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1827 greturn *ret;
1828 tree ret_expr = TREE_OPERAND (stmt, 0);
1829 tree result_decl, result;
1831 if (ret_expr == error_mark_node)
1832 return GS_ERROR;
1834 if (!ret_expr
1835 || TREE_CODE (ret_expr) == RESULT_DECL)
1837 maybe_add_early_return_predict_stmt (pre_p);
1838 greturn *ret = gimple_build_return (ret_expr);
1839 copy_warning (ret, stmt);
1840 gimplify_seq_add_stmt (pre_p, ret);
1841 return GS_ALL_DONE;
1844 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1845 result_decl = NULL_TREE;
1846 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1848 /* Used in C++ for handling EH cleanup of the return value if a local
1849 cleanup throws. Assume the front-end knows what it's doing. */
1850 result_decl = DECL_RESULT (current_function_decl);
1851 /* But crash if we end up trying to modify ret_expr below. */
1852 ret_expr = NULL_TREE;
1854 else
1856 result_decl = TREE_OPERAND (ret_expr, 0);
1858 /* See through a return by reference. */
1859 if (INDIRECT_REF_P (result_decl))
1860 result_decl = TREE_OPERAND (result_decl, 0);
1862 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1863 || TREE_CODE (ret_expr) == INIT_EXPR)
1864 && TREE_CODE (result_decl) == RESULT_DECL);
1867 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1868 Recall that aggregate_value_p is FALSE for any aggregate type that is
1869 returned in registers. If we're returning values in registers, then
1870 we don't want to extend the lifetime of the RESULT_DECL, particularly
1871 across another call. In addition, for those aggregates for which
1872 hard_function_value generates a PARALLEL, we'll die during normal
1873 expansion of structure assignments; there's special code in expand_return
1874 to handle this case that does not exist in expand_expr. */
1875 if (!result_decl)
1876 result = NULL_TREE;
1877 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1879 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1881 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1882 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1883 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1884 should be effectively allocated by the caller, i.e. all calls to
1885 this function must be subject to the Return Slot Optimization. */
1886 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1887 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1889 result = result_decl;
1891 else if (gimplify_ctxp->return_temp)
1892 result = gimplify_ctxp->return_temp;
1893 else
1895 result = create_tmp_reg (TREE_TYPE (result_decl));
1897 /* ??? With complex control flow (usually involving abnormal edges),
1898 we can wind up warning about an uninitialized value for this. Due
1899 to how this variable is constructed and initialized, this is never
1900 true. Give up and never warn. */
1901 suppress_warning (result, OPT_Wuninitialized);
1903 gimplify_ctxp->return_temp = result;
1906 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1907 Then gimplify the whole thing. */
1908 if (result != result_decl)
1909 TREE_OPERAND (ret_expr, 0) = result;
1911 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1913 maybe_add_early_return_predict_stmt (pre_p);
1914 ret = gimple_build_return (result);
1915 copy_warning (ret, stmt);
1916 gimplify_seq_add_stmt (pre_p, ret);
1918 return GS_ALL_DONE;
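/* For illustration only (temporary names made up): given

     int f (void) { return a + b; }

   the front end hands us RETURN_EXPR <MODIFY_EXPR <RESULT_DECL, a + b>>.
   When the value cannot live in the bare RESULT_DECL, the lhs is
   smashed to the cached return temporary, yielding roughly

     retval.0 = a + b;
     return retval.0;

   with retval.0 shared by every return statement in the function via
   gimplify_ctxp->return_temp.  */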
1921 /* Gimplify a variable-length array DECL. */
1923 static void
1924 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1926 /* This is a variable-sized decl. Simplify its size and mark it
1927 for deferred expansion. */
1928 tree t, addr, ptr_type;
1930 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1931 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1933 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1934 if (DECL_HAS_VALUE_EXPR_P (decl))
1935 return;
1937 /* All occurrences of this decl in final gimplified code will be
1938 replaced by indirection. Setting DECL_VALUE_EXPR does two
1939 things: First, it lets the rest of the gimplifier know what
1940 replacement to use. Second, it lets the debug info know
1941 where to find the value. */
1942 ptr_type = build_pointer_type (TREE_TYPE (decl));
1943 addr = create_tmp_var (ptr_type, get_name (decl));
1944 DECL_IGNORED_P (addr) = 0;
1945 t = build_fold_indirect_ref (addr);
1946 TREE_THIS_NOTRAP (t) = 1;
1947 SET_DECL_VALUE_EXPR (decl, t);
1948 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1950 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1951 max_int_size_in_bytes (TREE_TYPE (decl)));
1952 /* The call has been built for a variable-sized object. */
1953 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1954 t = fold_convert (ptr_type, t);
1955 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1957 gimplify_and_add (t, seq_p);
1959 /* Record the dynamic allocation associated with DECL if requested. */
1960 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1961 record_dynamic_alloc (decl);
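/* Lowering sketch (the temporary a.1 is illustrative): for

     void f (int n) { char a[n]; ... }

   this emits roughly

     a.1 = __builtin_alloca_with_align (n, DECL_ALIGN (a));

   with CALL_ALLOCA_FOR_VAR_P set on the call, and installs
   DECL_VALUE_EXPR (a) = *a.1, so every later use of `a' becomes an
   indirection through the pointer temporary.  */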
1964 /* A helper function to be called via walk_tree. Mark all labels under *TP
1965 as being forced. To be called for DECL_INITIAL of static variables. */
1967 static tree
1968 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1970 if (TYPE_P (*tp))
1971 *walk_subtrees = 0;
1972 if (TREE_CODE (*tp) == LABEL_DECL)
1974 FORCED_LABEL (*tp) = 1;
1975 cfun->has_forced_label_in_static = 1;
1978 return NULL_TREE;
1981 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1982 Build a call to internal const function DEFERRED_INIT:
1983 1st argument: SIZE of the DECL;
1984 2nd argument: INIT_TYPE;
1985 3rd argument: NAME of the DECL;
1987 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1989 static void
1990 gimple_add_init_for_auto_var (tree decl,
1991 enum auto_init_type init_type,
1992 gimple_seq *seq_p)
1994 gcc_assert (auto_var_p (decl));
1995 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1996 location_t loc = EXPR_LOCATION (decl);
1997 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1999 tree init_type_node
2000 = build_int_cst (integer_type_node, (int) init_type);
2002 tree decl_name = NULL_TREE;
2003 if (DECL_NAME (decl))
2005 decl_name = build_string_literal (DECL_NAME (decl));
2007 else
2009 char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
2010 sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
2011 decl_name = build_string_literal (decl_name_anonymous);
2014 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
2015 TREE_TYPE (decl), 3,
2016 decl_size, init_type_node,
2017 decl_name);
2019 gimplify_assign (decl, call, seq_p);
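/* For example (the same shape appears in the .DEFERRED_INIT example
   further below): with -ftrivial-auto-var-init=zero, an uninitialized
   local `int i;' receives

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   i.e. size 4 bytes, INIT_TYPE 2 (zero initialization), and the
   variable name for later diagnostics.  */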
2022 /* Generate padding initialization for automatic variable DECL.
2023 C guarantees that brace-init with fewer initializers than members of
2024 the aggregate will initialize the rest of the aggregate as if it were
2025 static initialization. In turn, static initialization guarantees that
2026 padding is initialized to zero. So, we always initialize the padding
2027 to zeroes regardless of INIT_TYPE.
2028 To do the padding initialization, we insert a call to
2029 __builtin_clear_padding (&decl, 0, for_auto_init = true).
2030 Note, we add an additional dummy argument for __builtin_clear_padding,
2031 'for_auto_init', to distinguish whether this call is for automatic
2032 variable initialization or not. */
2034 static void
2035 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
2036 gimple_seq *seq_p)
2038 tree addr_of_decl = NULL_TREE;
2039 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
2041 if (is_vla)
2043 /* The temporary address variable for this VLA should be
2044 created in gimplify_vla_decl. */
2045 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
2046 gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
2047 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
2049 else
2051 mark_addressable (decl);
2052 addr_of_decl = build_fold_addr_expr (decl);
2055 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
2056 build_one_cst (TREE_TYPE (addr_of_decl)));
2057 gimplify_seq_add_stmt (seq_p, call);
2060 /* Return true if DECL needs to be automatically initialized by the
2061 compiler. */
2062 static bool
2063 is_var_need_auto_init (tree decl)
2065 if (auto_var_p (decl)
2066 && (TREE_CODE (decl) != VAR_DECL
2067 || !DECL_HARD_REGISTER (decl))
2068 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2069 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
2070 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
2071 && !is_empty_type (TREE_TYPE (decl)))
2072 return true;
2073 return false;
2076 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2077 and initialization explicit. */
2079 static enum gimplify_status
2080 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
2082 tree stmt = *stmt_p;
2083 tree decl = DECL_EXPR_DECL (stmt);
2085 *stmt_p = NULL_TREE;
2087 if (TREE_TYPE (decl) == error_mark_node)
2088 return GS_ERROR;
2090 if ((TREE_CODE (decl) == TYPE_DECL
2091 || VAR_P (decl))
2092 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
2094 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
2095 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2096 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
2099 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2100 in case its size expressions contain problematic nodes like CALL_EXPR. */
2101 if (TREE_CODE (decl) == TYPE_DECL
2102 && DECL_ORIGINAL_TYPE (decl)
2103 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
2105 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
2106 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
2107 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
2110 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
2112 tree init = DECL_INITIAL (decl);
2113 bool is_vla = false;
2114 /* Check whether the decl has an FE-created VALUE_EXPR here, BEFORE
2115 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
2116 If the decl has a VALUE_EXPR that was created by the FE (usually
2117 the C++ FE), it's a proxy variable and the FE has already
2118 initialized its VALUE_EXPR; we should not initialize it again. */
2119 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
2121 poly_uint64 size;
2122 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
2123 || (!TREE_STATIC (decl)
2124 && flag_stack_check == GENERIC_STACK_CHECK
2125 && maybe_gt (size,
2126 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
2128 gimplify_vla_decl (decl, seq_p);
2129 is_vla = true;
2132 if (asan_poisoned_variables
2133 && !is_vla
2134 && TREE_ADDRESSABLE (decl)
2135 && !TREE_STATIC (decl)
2136 && !DECL_HAS_VALUE_EXPR_P (decl)
2137 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
2138 && dbg_cnt (asan_use_after_scope)
2139 && !gimplify_omp_ctxp
2140 /* GNAT introduces temporaries to hold return values of calls in
2141 initializers of variables defined in other units, so the
2142 declaration of the variable is discarded completely. We do not
2143 want to issue poison calls for such dropped variables. */
2144 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
2145 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
2147 asan_poisoned_variables->add (decl);
2148 asan_poison_variable (decl, false, seq_p);
2149 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
2150 gimplify_ctxp->live_switch_vars->add (decl);
2153 /* Some front ends do not explicitly declare all anonymous
2154 artificial variables. We compensate here by declaring the
2155 variables, though it would be better if the front ends would
2156 explicitly declare them. */
2157 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
2158 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
2159 gimple_add_tmp_var (decl);
2161 if (init && init != error_mark_node)
2163 if (!TREE_STATIC (decl))
2165 DECL_INITIAL (decl) = NULL_TREE;
2166 init = build2 (INIT_EXPR, void_type_node, decl, init);
2167 gimplify_and_add (init, seq_p);
2168 ggc_free (init);
2169 /* Clear TREE_READONLY if we really have an initialization. */
2170 if (!DECL_INITIAL (decl)
2171 && !omp_privatize_by_reference (decl))
2172 TREE_READONLY (decl) = 0;
2174 else
2175 /* We must still examine initializers for static variables
2176 as they may contain a label address. */
2177 walk_tree (&init, force_labels_r, NULL, NULL);
2179 /* When there is no explicit initializer, and the user has requested
2180 it, we should insert an artificial initializer for this automatic
2181 variable. */
2182 else if (is_var_need_auto_init (decl)
2183 && !decl_had_value_expr_p)
2185 gimple_add_init_for_auto_var (decl,
2186 flag_auto_var_init,
2187 seq_p);
2188 /* Expanding the call to .DEFERRED_INIT above will apply block
2189 initialization to the whole space covered by this variable. As a
2190 result, all the padding will be initialized to zeroes for zero
2191 initialization and to the byte-repeatable 0xFE pattern for
2192 pattern initialization.
2193 In order to make the padding zeroes for pattern init as well, we
2194 should add a call to __builtin_clear_padding to clear the
2195 padding to zero, for compatibility with Clang.
2196 We cannot insert this call if the variable is a gimple register,
2197 since __builtin_clear_padding takes the address of the
2198 variable. As a result, if a long double/_Complex long double
2199 variable is spilled to the stack later, its padding is 0xFE. */
2200 if (flag_auto_var_init == AUTO_INIT_PATTERN
2201 && !is_gimple_reg (decl)
2202 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
2203 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
2207 return GS_ALL_DONE;
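/* Illustration (not from an actual dump): for the block-scope
   declaration

     int i = foo ();

   *STMT_P is cleared and the initialization is gimplified onto *SEQ_P
   as

     i = foo ();

   with DECL_INITIAL (i) reset to NULL_TREE in the non-static case so
   the initializer is not emitted a second time.  */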
2210 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2211 and replacing the LOOP_EXPR with goto, but if the loop contains an
2212 EXIT_EXPR, we need to append a label for it to jump to. */
2214 static enum gimplify_status
2215 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2217 tree saved_label = gimplify_ctxp->exit_label;
2218 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2220 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
2222 gimplify_ctxp->exit_label = NULL_TREE;
2224 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
2226 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
2228 if (gimplify_ctxp->exit_label)
2229 gimplify_seq_add_stmt (pre_p,
2230 gimple_build_label (gimplify_ctxp->exit_label));
2232 gimplify_ctxp->exit_label = saved_label;
2234 *expr_p = NULL;
2235 return GS_ALL_DONE;
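/* Shape of the output (sketch): LOOP_EXPR <body> becomes

     start:
       body
       goto start;
     exit:          <- only if the body contained an EXIT_EXPR

   where `exit' is the label that gimplify_exit_expr stored in
   gimplify_ctxp->exit_label.  */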
2238 /* Gimplify a statement list onto a sequence. These may be created either
2239 by an enlightened front-end, or by shortcut_cond_expr. */
2241 static enum gimplify_status
2242 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2244 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2246 tree_stmt_iterator i = tsi_start (*expr_p);
2248 while (!tsi_end_p (i))
2250 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2251 tsi_delink (&i);
2254 if (temp)
2256 *expr_p = temp;
2257 return GS_OK;
2260 return GS_ALL_DONE;
2264 /* Emit a warning for the unreachable statement STMT if needed.
2265 Return the gimple itself when the warning is emitted, otherwise
2266 return NULL. */
2267 static gimple *
2268 emit_warn_switch_unreachable (gimple *stmt)
2270 if (gimple_code (stmt) == GIMPLE_GOTO
2271 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2272 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2273 /* Don't warn for compiler-generated gotos. These occur
2274 in Duff's devices, for example. */
2275 return NULL;
2276 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2277 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2278 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2279 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2280 || (is_gimple_assign (stmt)
2281 && gimple_assign_single_p (stmt)
2282 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2283 && gimple_call_internal_p (
2284 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2285 IFN_DEFERRED_INIT))))
2286 /* Don't warn for compiler-generated initializations for
2287 -ftrivial-auto-var-init.
2288 There are 3 cases:
2289 case 1: a call to .DEFERRED_INIT;
2290 case 2: a call to __builtin_clear_padding whose 2nd argument is
2291 present and non-zero;
2292 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2293 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2294 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2295 i1 = _1. */
2296 return NULL;
2297 else
2298 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2299 "statement will never be executed");
2300 return stmt;
2303 /* Callback for walk_gimple_seq. */
2305 static tree
2306 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2307 bool *handled_ops_p,
2308 struct walk_stmt_info *wi)
2310 gimple *stmt = gsi_stmt (*gsi_p);
2311 bool unreachable_issued = wi->info != NULL;
2313 *handled_ops_p = true;
2314 switch (gimple_code (stmt))
2316 case GIMPLE_TRY:
2317 /* A compiler-generated cleanup or a user-written try block.
2318 If it's empty, don't dive into it--that would result in
2319 worse location info. */
2320 if (gimple_try_eval (stmt) == NULL)
2322 if (warn_switch_unreachable && !unreachable_issued)
2323 wi->info = emit_warn_switch_unreachable (stmt);
2325 /* Stop if -Wtrivial-auto-var-init is not enabled. */
2326 if (!warn_trivial_auto_var_init)
2327 return integer_zero_node;
2329 /* Fall through. */
2330 case GIMPLE_BIND:
2331 case GIMPLE_CATCH:
2332 case GIMPLE_EH_FILTER:
2333 case GIMPLE_TRANSACTION:
2334 /* Walk the sub-statements. */
2335 *handled_ops_p = false;
2336 break;
2338 case GIMPLE_DEBUG:
2339 /* Ignore these. We may generate them before declarations that
2340 are never executed. If there's something to warn about,
2341 there will be non-debug stmts too, and we'll catch those. */
2342 break;
2344 case GIMPLE_LABEL:
2345 /* Stop at the first label. */
2346 return integer_zero_node;
2347 case GIMPLE_CALL:
2348 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2350 *handled_ops_p = false;
2351 break;
2353 if (warn_trivial_auto_var_init
2354 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2355 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2357 /* Get the variable name from the 3rd argument of call. */
2358 tree var_name = gimple_call_arg (stmt, 2);
2359 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2360 const char *var_name_str = TREE_STRING_POINTER (var_name);
2362 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2363 "%qs cannot be initialized with"
2364 " %<-ftrivial-auto-var-init%>",
2365 var_name_str);
2366 break;
2369 /* Fall through. */
2370 default:
2371 /* Check the first "real" statement (not a decl/lexical scope/...), and
2372 issue a warning if needed. */
2373 if (warn_switch_unreachable && !unreachable_issued)
2374 wi->info = emit_warn_switch_unreachable (stmt);
2375 /* Stop if -Wtrivial-auto-var-init is not enabled. */
2376 if (!warn_trivial_auto_var_init)
2377 return integer_zero_node;
2378 break;
2380 return NULL_TREE;
2384 /* Possibly warn about unreachable statements between a switch's
2385 controlling expression and the first case. Also warn when
2386 -ftrivial-auto-var-init cannot initialize an auto variable in such
2387 a situation. SEQ is the body of the switch expression. */
2389 static void
2390 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2392 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2393 /* This warning doesn't play well with Fortran when optimizations
2394 are on. */
2395 || lang_GNU_Fortran ()
2396 || seq == NULL)
2397 return;
2399 struct walk_stmt_info wi;
2401 memset (&wi, 0, sizeof (wi));
2402 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2406 /* A label entry that pairs label and a location. */
2407 struct label_entry
2409 tree label;
2410 location_t loc;
2413 /* Find LABEL in vector of label entries VEC. */
2415 static struct label_entry *
2416 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2418 unsigned int i;
2419 struct label_entry *l;
2421 FOR_EACH_VEC_ELT (*vec, i, l)
2422 if (l->label == label)
2423 return l;
2424 return NULL;
2427 /* Return true if LABEL, a LABEL_DECL, represents a case label
2428 in a vector of labels CASES. */
2430 static bool
2431 case_label_p (const vec<tree> *cases, tree label)
2433 unsigned int i;
2434 tree l;
2436 FOR_EACH_VEC_ELT (*cases, i, l)
2437 if (CASE_LABEL (l) == label)
2438 return true;
2439 return false;
2442 /* Find the last nondebug statement in a scope STMT. */
2444 static gimple *
2445 last_stmt_in_scope (gimple *stmt)
2447 if (!stmt)
2448 return NULL;
2450 switch (gimple_code (stmt))
2452 case GIMPLE_BIND:
2454 gbind *bind = as_a <gbind *> (stmt);
2455 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2456 return last_stmt_in_scope (stmt);
2459 case GIMPLE_TRY:
2461 gtry *try_stmt = as_a <gtry *> (stmt);
2462 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2463 gimple *last_eval = last_stmt_in_scope (stmt);
2464 if (gimple_stmt_may_fallthru (last_eval)
2465 && (last_eval == NULL
2466 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2467 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2469 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2470 return last_stmt_in_scope (stmt);
2472 else
2473 return last_eval;
2476 case GIMPLE_DEBUG:
2477 gcc_unreachable ();
2479 default:
2480 return stmt;
2484 /* Collect labels that may fall through into LABELS and return the statement
2485 preceding another case label, or a user-defined label. Store a location
2486 useful to give warnings at *PREVLOC (usually the location of the returned
2487 statement or of its surrounding scope). */
2489 static gimple *
2490 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2491 auto_vec <struct label_entry> *labels,
2492 location_t *prevloc)
2494 gimple *prev = NULL;
2496 *prevloc = UNKNOWN_LOCATION;
2499 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2501 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2502 which starts on a GIMPLE_SWITCH and ends with a break label.
2503 Handle that as a single statement that can fall through. */
2504 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2505 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2506 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2507 if (last
2508 && gimple_code (first) == GIMPLE_SWITCH
2509 && gimple_code (last) == GIMPLE_LABEL)
2511 tree label = gimple_label_label (as_a <glabel *> (last));
2512 if (SWITCH_BREAK_LABEL_P (label))
2514 prev = bind;
2515 gsi_next (gsi_p);
2516 continue;
2520 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2521 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2523 /* Nested scope. Only look at the last statement of
2524 the innermost scope. */
2525 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2526 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2527 if (last)
2529 prev = last;
2530 /* It might be a label without a location. Use the
2531 location of the scope then. */
2532 if (!gimple_has_location (prev))
2533 *prevloc = bind_loc;
2535 gsi_next (gsi_p);
2536 continue;
2539 /* Ifs are tricky. */
2540 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2542 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2543 tree false_lab = gimple_cond_false_label (cond_stmt);
2544 location_t if_loc = gimple_location (cond_stmt);
2546 /* If we have e.g.
2547 if (i > 1) goto <D.2259>; else goto D;
2548 we can't do much with the else-branch. */
2549 if (!DECL_ARTIFICIAL (false_lab))
2550 break;
2552 /* Go on until the false label, then one step back. */
2553 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2555 gimple *stmt = gsi_stmt (*gsi_p);
2556 if (gimple_code (stmt) == GIMPLE_LABEL
2557 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2558 break;
2561 /* Not found? Oops. */
2562 if (gsi_end_p (*gsi_p))
2563 break;
2565 /* A dead label can't fall through. */
2566 if (!UNUSED_LABEL_P (false_lab))
2568 struct label_entry l = { false_lab, if_loc };
2569 labels->safe_push (l);
2572 /* Go to the last statement of the then branch. */
2573 gsi_prev (gsi_p);
2575 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2576 <D.1759>:
2577 <stmt>;
2578 goto <D.1761>;
2579 <D.1760>:
2581 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2582 && !gimple_has_location (gsi_stmt (*gsi_p)))
2584 /* Look at the statement before, it might be
2585 attribute fallthrough, in which case don't warn. */
2586 gsi_prev (gsi_p);
2587 bool fallthru_before_dest
2588 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2589 gsi_next (gsi_p);
2590 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2591 if (!fallthru_before_dest)
2593 struct label_entry l = { goto_dest, if_loc };
2594 labels->safe_push (l);
2597 /* This case is about
2598 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2599 <D.2022>:
2600 n = n + 1; // #1
2601 <D.2023>: // #2
2602 <D.1988>: // #3
2603 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2604 through to #3. So set PREV to #1. */
2605 else if (UNUSED_LABEL_P (false_lab))
2606 prev = gsi_stmt (*gsi_p);
2608 /* And move back. */
2609 gsi_next (gsi_p);
2612 /* Remember the last statement. Skip labels that are of no interest
2613 to us. */
2614 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2616 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2617 if (find_label_entry (labels, label))
2618 prev = gsi_stmt (*gsi_p);
2620 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2622 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2624 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2625 prev = gsi_stmt (*gsi_p);
2626 gsi_next (gsi_p);
2628 while (!gsi_end_p (*gsi_p)
2629 /* Stop if we find a case or a user-defined label. */
2630 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2631 || !gimple_has_location (gsi_stmt (*gsi_p))));
2633 if (prev && gimple_has_location (prev))
2634 *prevloc = gimple_location (prev);
2635 return prev;
2638 /* Return true if the switch fallthrough warning should occur. LABEL is
2639 the label statement that we're falling through to. */
2641 static bool
2642 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2644 gimple_stmt_iterator gsi = *gsi_p;
2646 /* Don't warn if the label is marked with a "falls through" comment. */
2647 if (FALLTHROUGH_LABEL_P (label))
2648 return false;
2650 /* Don't warn for non-case labels followed by a statement:
2651 case 0:
2652 foo ();
2653 label:
2654 bar ();
2655 as these are likely intentional. */
2656 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2658 tree l;
2659 while (!gsi_end_p (gsi)
2660 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2661 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2662 && !case_label_p (&gimplify_ctxp->case_labels, l))
2663 gsi_next_nondebug (&gsi);
2664 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2665 return false;
2668 /* Don't warn for terminated branches, i.e. when the subsequent case
2669 label immediately breaks. */
2670 gsi = *gsi_p;
2672 /* Skip all immediately following labels. */
2673 while (!gsi_end_p (gsi)
2674 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2675 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2676 gsi_next_nondebug (&gsi);
2678 /* { ... something; default:; } */
2679 if (gsi_end_p (gsi)
2680 /* { ... something; default: break; } or
2681 { ... something; default: goto L; } */
2682 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2683 /* { ... something; default: return; } */
2684 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2685 return false;
2687 return true;
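/* Illustrative example: in

     switch (n)
       {
       case 0:
         foo ();
       case 1:
         bar ();
       }

   falling through from `foo ();' into `case 1' is diagnosed, unless
   the label is FALLTHROUGH_LABEL_P (e.g. from a "falls through"
   comment) or the branch is terminated by break, goto, or return as
   checked above.  */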
2690 /* Callback for walk_gimple_seq. */
2692 static tree
2693 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2694 struct walk_stmt_info *)
2696 gimple *stmt = gsi_stmt (*gsi_p);
2698 *handled_ops_p = true;
2699 switch (gimple_code (stmt))
2701 case GIMPLE_TRY:
2702 case GIMPLE_BIND:
2703 case GIMPLE_CATCH:
2704 case GIMPLE_EH_FILTER:
2705 case GIMPLE_TRANSACTION:
2706 /* Walk the sub-statements. */
2707 *handled_ops_p = false;
2708 break;
2710 /* Find a sequence of form:
2712 GIMPLE_LABEL
2713 [...]
2714 <may fallthru stmt>
2715 GIMPLE_LABEL
2717 and possibly warn. */
2718 case GIMPLE_LABEL:
2720 /* Found a label. Skip all immediately following labels. */
2721 while (!gsi_end_p (*gsi_p)
2722 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2723 gsi_next_nondebug (gsi_p);
2725 /* There might be no more statements. */
2726 if (gsi_end_p (*gsi_p))
2727 return integer_zero_node;
2729 /* Vector of labels that fall through. */
2730 auto_vec <struct label_entry> labels;
2731 location_t prevloc;
2732 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2734 /* There might be no more statements. */
2735 if (gsi_end_p (*gsi_p))
2736 return integer_zero_node;
2738 gimple *next = gsi_stmt (*gsi_p);
2739 tree label;
2740 /* If what follows is a label, then we may have a fallthrough. */
2741 if (gimple_code (next) == GIMPLE_LABEL
2742 && gimple_has_location (next)
2743 && (label = gimple_label_label (as_a <glabel *> (next)))
2744 && prev != NULL)
2746 struct label_entry *l;
2747 bool warned_p = false;
2748 auto_diagnostic_group d;
2749 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2750 /* Quiet. */;
2751 else if (gimple_code (prev) == GIMPLE_LABEL
2752 && (label = gimple_label_label (as_a <glabel *> (prev)))
2753 && (l = find_label_entry (&labels, label)))
2754 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2755 "this statement may fall through");
2756 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2757 /* Try to be clever and don't warn when the statement
2758 can't actually fall through. */
2759 && gimple_stmt_may_fallthru (prev)
2760 && prevloc != UNKNOWN_LOCATION)
2761 warned_p = warning_at (prevloc,
2762 OPT_Wimplicit_fallthrough_,
2763 "this statement may fall through");
2764 if (warned_p)
2765 inform (gimple_location (next), "here");
2767 /* Mark this label as processed so as to prevent multiple
2768 warnings in nested switches. */
2769 FALLTHROUGH_LABEL_P (label) = true;
2771 /* So that next warn_implicit_fallthrough_r will start looking for
2772 a new sequence starting with this label. */
2773 gsi_prev (gsi_p);
2776 break;
2777 default:
2778 break;
2780 return NULL_TREE;
2783 /* Warn when a switch case falls through. */
2785 static void
2786 maybe_warn_implicit_fallthrough (gimple_seq seq)
2788 if (!warn_implicit_fallthrough)
2789 return;
2791 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2792 if (!(lang_GNU_C ()
2793 || lang_GNU_CXX ()
2794 || lang_GNU_OBJC ()))
2795 return;
2797 struct walk_stmt_info wi;
2798 memset (&wi, 0, sizeof (wi));
2799 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2802 /* Callback for walk_gimple_seq. */
2804 static tree
2805 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2806 struct walk_stmt_info *wi)
2808 gimple *stmt = gsi_stmt (*gsi_p);
2810 *handled_ops_p = true;
2811 switch (gimple_code (stmt))
2813 case GIMPLE_TRY:
2814 case GIMPLE_BIND:
2815 case GIMPLE_CATCH:
2816 case GIMPLE_EH_FILTER:
2817 case GIMPLE_TRANSACTION:
2818 /* Walk the sub-statements. */
2819 *handled_ops_p = false;
2820 break;
2821 case GIMPLE_CALL:
2822 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2823 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2825 location_t loc = gimple_location (stmt);
2826 gsi_remove (gsi_p, true);
2827 wi->removed_stmt = true;
2829 /* The nothrow flag is added by genericize_c_loop to mark a fallthrough
2830 statement at the end of a loop's body. Those should be
2831 always diagnosed, either because they indeed don't precede
2832 a case label or default label, or because the next statement
2833 is not within the same iteration statement. */
2834 if ((stmt->subcode & GF_CALL_NOTHROW) != 0)
2836 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2837 "a case label or default label");
2838 break;
2841 if (gsi_end_p (*gsi_p))
2843 static_cast<location_t *>(wi->info)[0] = BUILTINS_LOCATION;
2844 static_cast<location_t *>(wi->info)[1] = loc;
2845 break;
2848 bool found = false;
2850 gimple_stmt_iterator gsi2 = *gsi_p;
2851 stmt = gsi_stmt (gsi2);
2852 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2854 /* Go on until the artificial label. */
2855 tree goto_dest = gimple_goto_dest (stmt);
2856 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2858 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2859 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2860 == goto_dest)
2861 break;
2864 /* Not found? Stop. */
2865 if (gsi_end_p (gsi2))
2866 break;
2868 /* Look one past it. */
2869 gsi_next (&gsi2);
2872 /* We're looking for a case label or default label here. */
2873 while (!gsi_end_p (gsi2))
2875 stmt = gsi_stmt (gsi2);
2876 if (gimple_code (stmt) == GIMPLE_LABEL)
2878 tree label = gimple_label_label (as_a <glabel *> (stmt));
2879 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2881 found = true;
2882 break;
2885 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2887 else if (!is_gimple_debug (stmt))
2888 /* Anything else is not expected. */
2889 break;
2890 gsi_next (&gsi2);
2892 if (!found)
2893 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2894 "a case label or default label");
2896 break;
2897 default:
2898 static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2899 break;
2901 return NULL_TREE;
2904 /* Expand all FALLTHROUGH () calls in SEQ. */
2906 static void
2907 expand_FALLTHROUGH (gimple_seq *seq_p)
2909 struct walk_stmt_info wi;
2910 location_t loc[2];
2911 memset (&wi, 0, sizeof (wi));
2912 loc[0] = UNKNOWN_LOCATION;
2913 loc[1] = UNKNOWN_LOCATION;
2914 wi.info = (void *) &loc[0];
2915 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2916 if (loc[0] != UNKNOWN_LOCATION)
2917 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2918 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2919 pedwarn (loc[1], 0, "attribute %<fallthrough%> not preceding "
2920 "a case label or default label");
2924 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2925 branch to. */
2927 static enum gimplify_status
2928 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2930 tree switch_expr = *expr_p;
2931 gimple_seq switch_body_seq = NULL;
2932 enum gimplify_status ret;
2933 tree index_type = TREE_TYPE (switch_expr);
2934 if (index_type == NULL_TREE)
2935 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2937 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2938 fb_rvalue);
2939 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2940 return ret;
2942 if (SWITCH_BODY (switch_expr))
2944 vec<tree> labels;
2945 vec<tree> saved_labels;
2946 hash_set<tree> *saved_live_switch_vars = NULL;
2947 tree default_case = NULL_TREE;
2948 gswitch *switch_stmt;
2950 /* Save old labels, get new ones from body, then restore the old
2951 labels. Save all the things from the switch body to append after. */
2952 saved_labels = gimplify_ctxp->case_labels;
2953 gimplify_ctxp->case_labels.create (8);
2955 /* Only create live_switch_vars if SWITCH_BODY is a BIND_EXPR or a STATEMENT_LIST. */
2956 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2957 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2958 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2959 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2960 else
2961 gimplify_ctxp->live_switch_vars = NULL;
2963 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2964 gimplify_ctxp->in_switch_expr = true;
2966 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2968 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2969 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2970 maybe_warn_implicit_fallthrough (switch_body_seq);
2971 /* Only do this for the outermost GIMPLE_SWITCH. */
2972 if (!gimplify_ctxp->in_switch_expr)
2973 expand_FALLTHROUGH (&switch_body_seq);
2975 labels = gimplify_ctxp->case_labels;
2976 gimplify_ctxp->case_labels = saved_labels;
2978 if (gimplify_ctxp->live_switch_vars)
2980 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2981 delete gimplify_ctxp->live_switch_vars;
2983 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2985 preprocess_case_label_vec_for_gimple (labels, index_type,
2986 &default_case);
2988 bool add_bind = false;
2989 if (!default_case)
2991 glabel *new_default;
2993 default_case
2994 = build_case_label (NULL_TREE, NULL_TREE,
2995 create_artificial_label (UNKNOWN_LOCATION));
2996 if (old_in_switch_expr)
2998 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2999 add_bind = true;
3001 new_default = gimple_build_label (CASE_LABEL (default_case));
3002 gimplify_seq_add_stmt (&switch_body_seq, new_default);
3004 else if (old_in_switch_expr)
3006 gimple *last = gimple_seq_last_stmt (switch_body_seq);
3007 if (last && gimple_code (last) == GIMPLE_LABEL)
3009 tree label = gimple_label_label (as_a <glabel *> (last));
3010 if (SWITCH_BREAK_LABEL_P (label))
3011 add_bind = true;
3015 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
3016 default_case, labels);
3017 gimple_set_location (switch_stmt, EXPR_LOCATION (switch_expr));
3018 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
3019 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
3020 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
3021 so that we can easily find the start and end of the switch
3022 statement. */
3023 if (add_bind)
3025 gimple_seq bind_body = NULL;
3026 gimplify_seq_add_stmt (&bind_body, switch_stmt);
3027 gimple_seq_add_seq (&bind_body, switch_body_seq);
3028 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
3029 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
3030 gimplify_seq_add_stmt (pre_p, bind);
3032 else
3034 gimplify_seq_add_stmt (pre_p, switch_stmt);
3035 gimplify_seq_add_seq (pre_p, switch_body_seq);
3037 labels.release ();
3039 else
3040 gcc_unreachable ();
3042 return GS_ALL_DONE;
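/* Result sketch (label names illustrative): the switch body is
   flattened into something like

     switch (n) <default: <D.3>, case 0: <D.1>>
     <D.1>:
       ...
     <D.3>:

   and when the body ends in a SWITCH_BREAK_LABEL_P label (including
   the artificial default invented above), the switch and its body are
   wrapped in a GIMPLE_BIND as described in the comment above.  */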
3045 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
3047 static enum gimplify_status
3048 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
3050 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
3051 == current_function_decl);
3053 tree label = LABEL_EXPR_LABEL (*expr_p);
3054 glabel *label_stmt = gimple_build_label (label);
3055 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3056 gimplify_seq_add_stmt (pre_p, label_stmt);
3058 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3059 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3060 NOT_TAKEN));
3061 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3062 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3063 TAKEN));
3065 return GS_ALL_DONE;
3068 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
3070 static enum gimplify_status
3071 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
3073 struct gimplify_ctx *ctxp;
3074 glabel *label_stmt;
3076 /* Invalid programs can play Duff's Device type games with, for example,
3077 #pragma omp parallel. At least in the C front end, we don't
3078 detect such invalid branches until after gimplification, in the
3079 diagnose_omp_blocks pass. */
3080 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
3081 if (ctxp->case_labels.exists ())
3082 break;
3084 tree label = CASE_LABEL (*expr_p);
3085 label_stmt = gimple_build_label (label);
3086 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3087 ctxp->case_labels.safe_push (*expr_p);
3088 gimplify_seq_add_stmt (pre_p, label_stmt);
3090 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3091 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3092 NOT_TAKEN));
3093 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3094 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3095 TAKEN));
3097 return GS_ALL_DONE;
3100 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
3101 if necessary. */
3103 tree
3104 build_and_jump (tree *label_p)
3106 if (label_p == NULL)
3107 /* If there's nowhere to jump, just fall through. */
3108 return NULL_TREE;
3110 if (*label_p == NULL_TREE)
3112 tree label = create_artificial_label (UNKNOWN_LOCATION);
3113 *label_p = label;
3116 return build1 (GOTO_EXPR, void_type_node, *label_p);
3119 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3120 This also involves building a label to jump to and communicating it to
3121 gimplify_loop_expr through gimplify_ctxp->exit_label. */
3123 static enum gimplify_status
3124 gimplify_exit_expr (tree *expr_p)
3126 tree cond = TREE_OPERAND (*expr_p, 0);
3127 tree expr;
3129 expr = build_and_jump (&gimplify_ctxp->exit_label);
3130 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
3131 *expr_p = expr;
3133 return GS_OK;
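/* Illustration: EXIT_EXPR <cond> is rewritten to

     if (cond) goto exit_label; else (void) 0;

   where exit_label is created on demand by build_and_jump and later
   emitted by gimplify_loop_expr after the loop body.  */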
3136 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3137 different from its canonical type, wrap the whole thing inside a
3138 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3139 type.
3141 The canonical type of a COMPONENT_REF is the type of the field being
3142 referenced--unless the field is a bit-field which can be read directly
3143 in a smaller mode, in which case the canonical type is the
3144 sign-appropriate type corresponding to that mode. */
3146 static void
3147 canonicalize_component_ref (tree *expr_p)
3149 tree expr = *expr_p;
3150 tree type;
3152 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
3154 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
3155 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
3156 else
3157 type = TREE_TYPE (TREE_OPERAND (expr, 1));
3159 /* One could argue that all the stuff below is not necessary for
3160 the non-bitfield case and declare it a FE error if type
3161 adjustment would be needed. */
3162 if (TREE_TYPE (expr) != type)
3164 #ifdef ENABLE_TYPES_CHECKING
3165 tree old_type = TREE_TYPE (expr);
3166 #endif
3167 int type_quals;
3169 /* We need to preserve qualifiers and propagate them from
3170 operand 0. */
3171 type_quals = TYPE_QUALS (type)
3172 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
3173 if (TYPE_QUALS (type) != type_quals)
3174 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
3176 /* Set the type of the COMPONENT_REF to the underlying type. */
3177 TREE_TYPE (expr) = type;
3179 #ifdef ENABLE_TYPES_CHECKING
3180 /* It is now a FE error, if the conversion from the canonical
3181 type to the original expression type is not useless. */
3182 gcc_assert (useless_type_conversion_p (old_type, type));
3183 #endif
3187 /* If a NOP conversion is changing a pointer to array of foo to a pointer
3188 to foo, embed that change in the ADDR_EXPR by converting
3189 T array[U];
3190 (T *)&array
3192 &array[L]
3193 where L is the lower bound. For simplicity, only do this for constant
3194 lower bound.
3195 The constraint is that the type of &array[L] is trivially convertible
3196 to T *. */
3198 static void
3199 canonicalize_addr_expr (tree *expr_p)
3201 tree expr = *expr_p;
3202 tree addr_expr = TREE_OPERAND (expr, 0);
3203 tree datype, ddatype, pddatype;
3205 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3206 if (!POINTER_TYPE_P (TREE_TYPE (expr))
3207 || TREE_CODE (addr_expr) != ADDR_EXPR)
3208 return;
3210 /* The addr_expr type should be a pointer to an array. */
3211 datype = TREE_TYPE (TREE_TYPE (addr_expr));
3212 if (TREE_CODE (datype) != ARRAY_TYPE)
3213 return;
3215 /* The pointer to element type shall be trivially convertible to
3216 the expression pointer type. */
3217 ddatype = TREE_TYPE (datype);
3218 pddatype = build_pointer_type (ddatype);
3219 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3220 pddatype))
3221 return;
3223 /* The lower bound and element sizes must be constant. */
3224 if (!TYPE_SIZE_UNIT (ddatype)
3225 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3226 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3227 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3228 return;
3230 /* All checks succeeded. Build a new node to merge the cast. */
3231 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3232 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3233 NULL_TREE, NULL_TREE);
3234 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3236 /* We can have stripped a required restrict qualifier above. */
3237 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3238 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
3241 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3242 underneath as appropriate. */
3244 static enum gimplify_status
3245 gimplify_conversion (tree *expr_p)
3247 location_t loc = EXPR_LOCATION (*expr_p);
3248 gcc_assert (CONVERT_EXPR_P (*expr_p));
3250 /* Then strip away all but the outermost conversion. */
3251 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3253 /* And remove the outermost conversion if it's useless. */
3254 if (tree_ssa_useless_type_conversion (*expr_p))
3255 *expr_p = TREE_OPERAND (*expr_p, 0);
3257 /* If we still have a conversion at the toplevel,
3258 then canonicalize some constructs. */
3259 if (CONVERT_EXPR_P (*expr_p))
3261 tree sub = TREE_OPERAND (*expr_p, 0);
3263 /* If a NOP conversion is changing the type of a COMPONENT_REF
3264 expression, then canonicalize its type now in order to expose more
3265 redundant conversions. */
3266 if (TREE_CODE (sub) == COMPONENT_REF)
3267 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3269 /* If a NOP conversion is changing a pointer to array of foo
3270 to a pointer to foo, embed that change in the ADDR_EXPR. */
3271 else if (TREE_CODE (sub) == ADDR_EXPR)
3272 canonicalize_addr_expr (expr_p);
3275 /* If we have a conversion to a non-register type force the
3276 use of a VIEW_CONVERT_EXPR instead. */
3277 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3278 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3279 TREE_OPERAND (*expr_p, 0));
3281 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3282 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3283 TREE_SET_CODE (*expr_p, NOP_EXPR);
3285 return GS_OK;
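/* Illustrative effects (types made up): a conversion wrapped around a
   COMPONENT_REF first has the ref's canonical type exposed; for

     float arr[4];  ... (float *) &arr ...

   canonicalize_addr_expr rewrites the operand to &arr[0]; and a
   remaining conversion to a non-register (e.g. aggregate) type is
   forced into a VIEW_CONVERT_EXPR.  */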
3288 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3289 DECL_VALUE_EXPR, and it's worth re-examining things. */
3291 static enum gimplify_status
3292 gimplify_var_or_parm_decl (tree *expr_p)
3294 tree decl = *expr_p;
3296 /* ??? If this is a local variable, and it has not been seen in any
3297 outer BIND_EXPR, then it's probably the result of a duplicate
3298 declaration, for which we've already issued an error. It would
3299 be really nice if the front end wouldn't leak these at all.
3300 Currently the only known culprit is C++ destructors, as seen
3301 in g++.old-deja/g++.jason/binding.C.
3302 Another possible culprit is size expressions for variably modified
3303 types which are lost in the FE or not gimplified correctly. */
3304 if (VAR_P (decl)
3305 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3306 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3307 && decl_function_context (decl) == current_function_decl)
3309 gcc_assert (seen_error ());
3310 return GS_ERROR;
3313 /* When within an OMP context, notice uses of variables. */
3314 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3315 return GS_ALL_DONE;
3317 /* If the decl is an alias for another expression, substitute it now. */
3318 if (DECL_HAS_VALUE_EXPR_P (decl))
3320 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3321 return GS_OK;
3324 return GS_ALL_DONE;
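/* Illustration: for a VLA `a' lowered by gimplify_vla_decl, this is
   where a use of `a' is replaced by an unshared copy of its
   DECL_VALUE_EXPR `*a.1', and GS_OK asks the caller to gimplify the
   substituted expression again.  */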
3327 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3329 static void
3330 recalculate_side_effects (tree t)
3332 enum tree_code code = TREE_CODE (t);
3333 int len = TREE_OPERAND_LENGTH (t);
3334 int i;
3336 switch (TREE_CODE_CLASS (code))
3338 case tcc_expression:
3339 switch (code)
3341 case INIT_EXPR:
3342 case MODIFY_EXPR:
3343 case VA_ARG_EXPR:
3344 case PREDECREMENT_EXPR:
3345 case PREINCREMENT_EXPR:
3346 case POSTDECREMENT_EXPR:
3347 case POSTINCREMENT_EXPR:
3348 /* All of these have side-effects, no matter what their
3349 operands are. */
3350 return;
3352 default:
3353 break;
3355 /* Fall through. */
3357 case tcc_comparison: /* a comparison expression */
3358 case tcc_unary: /* a unary arithmetic expression */
3359 case tcc_binary: /* a binary arithmetic expression */
3360 case tcc_reference: /* a reference */
3361 case tcc_vl_exp: /* a function call */
3362 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3363 for (i = 0; i < len; ++i)
3365 tree op = TREE_OPERAND (t, i);
3366 if (op && TREE_SIDE_EFFECTS (op))
3367 TREE_SIDE_EFFECTS (t) = 1;
3369 break;
3371 case tcc_constant:
3372 /* No side-effects. */
3373 return;
3375 default:
3376 if (code == SSA_NAME)
3377 /* No side-effects. */
3378 return;
3379 gcc_unreachable ();
3383 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3384 node *EXPR_P.
3386 compound_lval
3387 : min_lval '[' val ']'
3388 | min_lval '.' ID
3389 | compound_lval '[' val ']'
3390 | compound_lval '.' ID
3392 This is not part of the original SIMPLE definition, which separates
3393 array and member references, but it seems reasonable to handle them
3394 together. Also, this way we don't run into problems with union
3395 aliasing; gcc requires that for accesses through a union to alias, the
3396 union reference must be explicit, which was not always the case when we
3397 were splitting up array and member refs.
3399 PRE_P points to the sequence where side effects that must happen before
3400 *EXPR_P should be stored.
3402 POST_P points to the sequence where side effects that must happen after
3403 *EXPR_P should be stored. */
3405 static enum gimplify_status
3406 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3407 fallback_t fallback)
3409 tree *p;
3410 enum gimplify_status ret = GS_ALL_DONE, tret;
3411 int i;
3412 location_t loc = EXPR_LOCATION (*expr_p);
3413 tree expr = *expr_p;
3415 /* Create a stack of the subexpressions so later we can walk them in
3416 order from inner to outer. */
3417 auto_vec<tree, 10> expr_stack;
3419 /* We can handle anything that get_inner_reference can deal with. */
3420 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3422 restart:
3423 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3424 if (TREE_CODE (*p) == INDIRECT_REF)
3425 *p = fold_indirect_ref_loc (loc, *p);
3427 if (handled_component_p (*p))
3429 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3430 additional COMPONENT_REFs. */
3431 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3432 && gimplify_var_or_parm_decl (p) == GS_OK)
3433 goto restart;
3434 else
3435 break;
3437 expr_stack.safe_push (*p);
3440 gcc_assert (expr_stack.length ());
3442 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3443 walked through and P points to the innermost expression.
3445 Java requires that we elaborate nodes in source order. That
3446 means we must gimplify the inner expression followed by each of
3447 the indices, in order. But we can't gimplify the inner
3448 expression until we deal with any variable bounds, sizes, or
3449 positions in order to deal with PLACEHOLDER_EXPRs.
3451 The base expression may contain a statement expression that
3452 has declarations used in size expressions, so it has to be
3453 gimplified before gimplifying the size expressions.
3455 So we do this in three steps. First we deal with variable
3456 bounds, sizes, and positions, then we gimplify the base and
3457 ensure it is memory if needed, then we deal with the annotations
3458 for any variables in the components and any indices, from left
3459 to right. */
3461 bool need_non_reg = false;
3462 for (i = expr_stack.length () - 1; i >= 0; i--)
3464 tree t = expr_stack[i];
3466 if (error_operand_p (TREE_OPERAND (t, 0)))
3467 return GS_ERROR;
3469 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3471 /* Deal with the low bound and element type size and put them into
3472 the ARRAY_REF. If these values are set, they have already been
3473 gimplified. */
3474 if (TREE_OPERAND (t, 2) == NULL_TREE)
3476 tree low = unshare_expr (array_ref_low_bound (t));
3477 if (!is_gimple_min_invariant (low))
3479 TREE_OPERAND (t, 2) = low;
3483 if (TREE_OPERAND (t, 3) == NULL_TREE)
3485 tree elmt_size = array_ref_element_size (t);
3486 if (!is_gimple_min_invariant (elmt_size))
3488 elmt_size = unshare_expr (elmt_size);
3489 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3490 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3492 /* Divide the element size by the alignment of the element
3493 type (above). */
3494 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3495 elmt_size, factor);
3497 TREE_OPERAND (t, 3) = elmt_size;
3500 need_non_reg = true;
3502 else if (TREE_CODE (t) == COMPONENT_REF)
3504 /* Set the field offset into T and gimplify it. */
3505 if (TREE_OPERAND (t, 2) == NULL_TREE)
3507 tree offset = component_ref_field_offset (t);
3508 if (!is_gimple_min_invariant (offset))
3510 offset = unshare_expr (offset);
3511 tree field = TREE_OPERAND (t, 1);
3512 tree factor
3513 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3515 /* Divide the offset by its alignment. */
3516 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3517 offset, factor);
3519 TREE_OPERAND (t, 2) = offset;
3522 need_non_reg = true;
3524 else if (!is_gimple_reg_type (TREE_TYPE (t)))
3525 /* When the result of an operation, in particular a VIEW_CONVERT_EXPR,
3526 is a non-register type then require the base object to be a
3527 non-register as well. */
3528 need_non_reg = true;
3531 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3532 so as to match the min_lval predicate. Failure to do so may result
3533 in the creation of large aggregate temporaries. */
3534 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3535 fallback | fb_lvalue);
3536 ret = MIN (ret, tret);
3537 if (ret == GS_ERROR)
3538 return GS_ERROR;
3540 /* Step 2a: if we have component references we do not support on
3541 registers then make sure the base isn't a register. Of course
3542 we can only do so if an rvalue is OK. */
3543 if (need_non_reg && (fallback & fb_rvalue))
3544 prepare_gimple_addressable (p, pre_p);
3547 /* Step 3: gimplify size expressions and the indices and operands of
3548 ARRAY_REF. During this loop we also remove any useless conversions.
3549 If we operate on a register also make sure to properly gimplify
3550 to individual operations. */
3552 bool reg_operations = is_gimple_reg (*p);
3553 for (; expr_stack.length () > 0; )
3555 tree t = expr_stack.pop ();
3557 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3559 gcc_assert (!reg_operations);
3561 /* Gimplify the low bound and element type size. */
3562 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3563 is_gimple_reg, fb_rvalue);
3564 ret = MIN (ret, tret);
3566 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3567 is_gimple_reg, fb_rvalue);
3568 ret = MIN (ret, tret);
3570 /* Gimplify the dimension. */
3571 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3572 is_gimple_val, fb_rvalue);
3573 ret = MIN (ret, tret);
3575 else if (TREE_CODE (t) == COMPONENT_REF)
3577 gcc_assert (!reg_operations);
3579 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3580 is_gimple_reg, fb_rvalue);
3581 ret = MIN (ret, tret);
3583 else if (reg_operations)
3585 tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
3586 is_gimple_val, fb_rvalue);
3587 ret = MIN (ret, tret);
3590 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3592 /* The innermost expression P may have originally had
3593 TREE_SIDE_EFFECTS set which would have caused all the outer
3594 expressions in *EXPR_P leading to P to also have had
3595 TREE_SIDE_EFFECTS set. */
3596 recalculate_side_effects (t);
3599 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3600 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3602 canonicalize_component_ref (expr_p);
3605 expr_stack.release ();
3607 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3609 return ret;
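/* Worked example (temporary name illustrative): for

     x = a[i + 1].f;

   the variable index is gimplified to a GIMPLE value first,

     D.1 = i + 1;
     x = a[D.1].f;

   while invariant low bounds, element sizes and field offsets are
   left implicit (operands 2 and 3 of the refs stay NULL_TREE).  */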
3612 /* Gimplify the self modifying expression pointed to by EXPR_P
3613 (++, --, +=, -=).
3615 PRE_P points to the list where side effects that must happen before
3616 *EXPR_P should be stored.
3618 POST_P points to the list where side effects that must happen after
3619 *EXPR_P should be stored.
3621 WANT_VALUE is nonzero iff we want to use the value of this expression
3622 in another expression.
3624 ARITH_TYPE is the type the computation should be performed in. */
3626 enum gimplify_status
3627 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3628 bool want_value, tree arith_type)
3630 enum tree_code code;
3631 tree lhs, lvalue, rhs, t1;
3632 gimple_seq post = NULL, *orig_post_p = post_p;
3633 bool postfix;
3634 enum tree_code arith_code;
3635 enum gimplify_status ret;
3636 location_t loc = EXPR_LOCATION (*expr_p);
3638 code = TREE_CODE (*expr_p);
3640 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3641 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3643 /* Prefix or postfix? */
3644 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3645 /* Faster to treat as prefix if result is not used. */
3646 postfix = want_value;
3647 else
3648 postfix = false;
3650 /* For postfix, make sure the inner expression's post side effects
3651 are executed after side effects from this expression. */
3652 if (postfix)
3653 post_p = &post;
3655 /* Add or subtract? */
3656 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3657 arith_code = PLUS_EXPR;
3658 else
3659 arith_code = MINUS_EXPR;
3661 /* Gimplify the LHS into a GIMPLE lvalue. */
3662 lvalue = TREE_OPERAND (*expr_p, 0);
3663 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3664 if (ret == GS_ERROR)
3665 return ret;
3667 /* Extract the operands to the arithmetic operation. */
3668 lhs = lvalue;
3669 rhs = TREE_OPERAND (*expr_p, 1);
3671 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3672 that as the result value and in the postqueue operation. */
3673 if (postfix)
3675 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3676 if (ret == GS_ERROR)
3677 return ret;
3679 lhs = get_initialized_tmp_var (lhs, pre_p);
3682 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3683 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3685 rhs = convert_to_ptrofftype_loc (loc, rhs);
3686 if (arith_code == MINUS_EXPR)
3687 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3688 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3690 else
3691 t1 = fold_convert (TREE_TYPE (*expr_p),
3692 fold_build2 (arith_code, arith_type,
3693 fold_convert (arith_type, lhs),
3694 fold_convert (arith_type, rhs)));
3696 if (postfix)
3698 gimplify_assign (lvalue, t1, pre_p);
3699 gimplify_seq_add_seq (orig_post_p, post);
3700 *expr_p = lhs;
3701 return GS_ALL_DONE;
3703 else
3705 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3706 return GS_OK;
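/* Illustration (temporary name made up): the postfix use

     use (i++);

   is gimplified as

     i.0 = i;
     i = i.0 + 1;
     use (i.0);

   whereas prefix `++i' with an unused result is simply
   `i = i + 1'.  */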
3710 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3712 static void
3713 maybe_with_size_expr (tree *expr_p)
3715 tree expr = *expr_p;
3716 tree type = TREE_TYPE (expr);
3717 tree size;
3719 /* If we've already wrapped this or the type is error_mark_node, we can't do
3720 anything. */
3721 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3722 || type == error_mark_node)
3723 return;
3725 /* If the size isn't known or is a constant, we have nothing to do. */
3726 size = TYPE_SIZE_UNIT (type);
3727 if (!size || poly_int_tree_p (size))
3728 return;
3730 /* Otherwise, make a WITH_SIZE_EXPR. */
3731 size = unshare_expr (size);
3732 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3733 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
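/* Illustrative sketch (an added example): for a C99 VLA such as
   'char buf[n]', TYPE_SIZE_UNIT of the type is not constant, so an object
   of that type that must be passed around is wrapped as roughly

     WITH_SIZE_EXPR <buf, saved_n>

   where saved_n stands for whatever tree records the computed byte size;
   later consumers can then tell how many bytes the operand covers.  */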
3736 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3737 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3738 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3739 gimplified to an SSA name. */
3741 enum gimplify_status
3742 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3743 bool allow_ssa)
3745 bool (*test) (tree);
3746 fallback_t fb;
3748 /* In general, we allow lvalues for function arguments to avoid
3749 extra overhead of copying large aggregates out of even larger
3750 aggregates into temporaries only to copy the temporaries to
3751 the argument list. Make optimizers happy by pulling out to
3752 temporaries those types that fit in registers. */
3753 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3754 test = is_gimple_val, fb = fb_rvalue;
3755 else
3757 test = is_gimple_lvalue, fb = fb_either;
3758 /* Also strip a TARGET_EXPR that would force an extra copy. */
3759 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3761 tree init = TARGET_EXPR_INITIAL (*arg_p);
3762 if (init
3763 && !VOID_TYPE_P (TREE_TYPE (init))
3764 /* Currently, due to c++/116015, it is not desirable to
3765 strip a TARGET_EXPR whose initializer is a {}. The
3766 problem is that if we do elide it, we also have to
3767 replace all the occurrences of the slot temporary in the
3768 initializer with the temporary created for the argument.
3769 But we do not have that temporary yet, so the replacement
3770 would be quite awkward and we might need to resort
3771 to a PLACEHOLDER_EXPR. Note that stripping the
3772 TARGET_EXPR wouldn't help anyway, as gimplify_expr would
3773 just allocate a temporary to store the CONSTRUCTOR into.
3774 (FIXME PR116375.)
3776 See convert_for_arg_passing for the C++ code that marks
3777 the TARGET_EXPR as eliding or not. */
3778 && TREE_CODE (init) != CONSTRUCTOR)
3779 *arg_p = init;
3783 /* If this is a variable sized type, we must remember the size. */
3784 maybe_with_size_expr (arg_p);
3786 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3787 /* Make sure arguments have the same location as the function call
3788 itself. */
3789 protected_set_expr_location (*arg_p, call_location);
3791 /* There is a sequence point before a function call. Side effects in
3792 the argument list must occur before the actual call. So, when
3793 gimplifying arguments, force gimplify_expr to use an internal
3794 post queue which is then appended to the end of PRE_P. */
3795 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
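/* Illustrative sketch (an added example): for a call such as 'f (x++)',
   the internal post queue forces the increment ahead of the call, roughly

     tmp = x;
     x = tmp + 1;
     f (tmp);

   instead of letting the update float to after the GIMPLE_CALL.  */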
3798 /* Don't fold inside offloading or taskreg regions: it can break code by
3799 adding decl references that weren't in the source. We'll do it during
3800 the omplower pass instead. */
3802 static bool
3803 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3805 struct gimplify_omp_ctx *ctx;
3806 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3807 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3808 return false;
3809 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3810 return false;
3811 /* Delay folding of builtins until the IL is in consistent state
3812 so the diagnostic machinery can do a better job. */
3813 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3814 return false;
3815 return fold_stmt (gsi);
3818 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3819 WANT_VALUE is true if the result of the call is desired. */
3821 static enum gimplify_status
3822 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3824 tree fndecl, parms, p, fnptrtype;
3825 enum gimplify_status ret;
3826 int i, nargs;
3827 gcall *call;
3828 bool builtin_va_start_p = false;
3829 location_t loc = EXPR_LOCATION (*expr_p);
3831 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3833 /* For reliable diagnostics during inlining, it is necessary that
3834 every call_expr be annotated with file and line. */
3835 if (! EXPR_HAS_LOCATION (*expr_p))
3836 SET_EXPR_LOCATION (*expr_p, input_location);
3838 /* Gimplify internal functions created in the FEs. */
3839 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3841 if (want_value)
3842 return GS_ALL_DONE;
3844 nargs = call_expr_nargs (*expr_p);
3845 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3846 auto_vec<tree> vargs (nargs);
3848 if (ifn == IFN_ASSUME)
3850 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3852 /* If the [[assume (cond)]]; condition is simple
3853 enough and can be evaluated unconditionally
3854 without side-effects, expand it as
3855 if (!cond) __builtin_unreachable (); */
3856 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3857 *expr_p = build3 (COND_EXPR, void_type_node,
3858 CALL_EXPR_ARG (*expr_p, 0), void_node,
3859 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3860 fndecl, 0));
3861 return GS_OK;
3863 /* If not optimizing, ignore the assumptions. */
3864 if (!optimize || seen_error ())
3866 *expr_p = NULL_TREE;
3867 return GS_ALL_DONE;
3869 /* Temporarily, until gimple lowering, transform
3870 .ASSUME (cond);
3871 into:
3872 [[assume (guard)]]
3874 guard = cond;
3876 such that gimple lowering can outline the condition into
3877 a separate function easily. */
3878 tree guard = create_tmp_var (boolean_type_node);
3879 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3880 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3881 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3882 push_gimplify_context ();
3883 gimple_seq body = NULL;
3884 gimple *g = gimplify_and_return_first (*expr_p, &body);
3885 pop_gimplify_context (g);
3886 g = gimple_build_assume (guard, body);
3887 gimple_set_location (g, loc);
3888 gimplify_seq_add_stmt (pre_p, g);
3889 *expr_p = NULL_TREE;
3890 return GS_ALL_DONE;
3893 for (i = 0; i < nargs; i++)
3895 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3896 EXPR_LOCATION (*expr_p));
3897 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3900 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3901 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3902 gimplify_seq_add_stmt (pre_p, call);
3903 return GS_ALL_DONE;
3906 /* This may be a call to a builtin function.
3908 Builtin function calls may be transformed into different
3909 (and more efficient) builtin function calls under certain
3910 circumstances. Unfortunately, gimplification can muck things
3911 up enough that the builtin expanders are not aware that certain
3912 transformations are still valid.
3914 So we attempt transformation/gimplification of the call before
3915 we gimplify the CALL_EXPR. At this time we do not manage to
3916 transform all calls in the same manner as the expanders do, but
3917 we do transform most of them. */
3918 fndecl = get_callee_fndecl (*expr_p);
3919 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3920 switch (DECL_FUNCTION_CODE (fndecl))
3922 CASE_BUILT_IN_ALLOCA:
3923 /* If the call has been built for a variable-sized object, then we
3924 want to restore the stack level when the enclosing BIND_EXPR is
3925 exited to reclaim the allocated space; otherwise, we precisely
3926 need to do the opposite and preserve the latest stack level. */
3927 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3928 gimplify_ctxp->save_stack = true;
3929 else
3930 gimplify_ctxp->keep_stack = true;
3931 break;
3933 case BUILT_IN_VA_START:
3935 builtin_va_start_p = true;
3936 if (call_expr_nargs (*expr_p) < 2)
3938 error ("too few arguments to function %<va_start%>");
3939 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3940 return GS_OK;
3943 if (fold_builtin_next_arg (*expr_p, true))
3945 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3946 return GS_OK;
3948 break;
3951 case BUILT_IN_EH_RETURN:
3952 cfun->calls_eh_return = true;
3953 break;
3955 case BUILT_IN_CLEAR_PADDING:
3956 if (call_expr_nargs (*expr_p) == 1)
3958 /* Remember the original type of the argument in an internal
3959 dummy second argument, as pointer conversions are useless
3960 in GIMPLE. Also mark this call as not for automatic
3961 initialization in the internal dummy third argument. */
3962 p = CALL_EXPR_ARG (*expr_p, 0);
3963 *expr_p
3964 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3965 build_zero_cst (TREE_TYPE (p)));
3966 return GS_OK;
3968 break;
3970 default:
3973 if (fndecl && fndecl_built_in_p (fndecl))
3975 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3976 if (new_tree && new_tree != *expr_p)
3978 /* There was a transformation of this call which computes the
3979 same value, but in a more efficient way. Return and try
3980 again. */
3981 *expr_p = new_tree;
3982 return GS_OK;
3986 /* Remember the original function pointer type. */
3987 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3989 if (flag_openmp
3990 && fndecl
3991 && cfun
3992 && (cfun->curr_properties & PROP_gimple_any) == 0)
3994 tree variant = omp_resolve_declare_variant (fndecl);
3995 if (variant != fndecl)
3996 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3999 /* There is a sequence point before the call, so any side effects in
4000 the calling expression must occur before the actual call. Force
4001 gimplify_expr to use an internal post queue. */
4002 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
4003 is_gimple_call_addr, fb_rvalue);
4005 if (ret == GS_ERROR)
4006 return GS_ERROR;
4008 nargs = call_expr_nargs (*expr_p);
4010 /* Get argument types for verification. */
4011 fndecl = get_callee_fndecl (*expr_p);
4012 parms = NULL_TREE;
4013 if (fndecl)
4014 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4015 else
4016 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
4018 if (fndecl && DECL_ARGUMENTS (fndecl))
4019 p = DECL_ARGUMENTS (fndecl);
4020 else if (parms)
4021 p = parms;
4022 else
4023 p = NULL_TREE;
4024 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
4027 /* If the last argument is __builtin_va_arg_pack () and it is not
4028 passed as a named argument, decrease the number of CALL_EXPR
4029 arguments and instead set the CALL_EXPR_VA_ARG_PACK flag. */
4030 if (!p
4031 && i < nargs
4032 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
4034 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
4035 tree last_arg_fndecl = get_callee_fndecl (last_arg);
4037 if (last_arg_fndecl
4038 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
4040 tree call = *expr_p;
4042 --nargs;
4043 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
4044 CALL_EXPR_FN (call),
4045 nargs, CALL_EXPR_ARGP (call));
4047 /* Copy all CALL_EXPR flags, location and block, except
4048 the CALL_EXPR_VA_ARG_PACK flag. */
4049 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
4050 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
4051 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
4052 = CALL_EXPR_RETURN_SLOT_OPT (call);
4053 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
4054 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
4056 /* Set CALL_EXPR_VA_ARG_PACK. */
4057 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
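/* Illustrative sketch (an added example): __builtin_va_arg_pack () shows
   up in always_inline variadic wrappers such as

     static inline __attribute__ ((always_inline)) int
     log_to (FILE *f, const char *fmt, ...)
     {
       return fprintf (f, fmt, __builtin_va_arg_pack ());
     }

   where the trailing __builtin_va_arg_pack () argument is dropped here
   and CALL_EXPR_VA_ARG_PACK is set, so that inlining can later splice in
   the caller's actual variadic arguments.  */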
4061 /* If the call returns twice then after building the CFG the call
4062 argument computations will no longer dominate the call because
4063 we add an abnormal incoming edge to the call. So do not use SSA
4064 vars there. */
4065 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
4067 /* Gimplify the function arguments. */
4068 if (nargs > 0)
4070 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
4071 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
4072 PUSH_ARGS_REVERSED ? i-- : i++)
4074 enum gimplify_status t;
4076 /* Avoid gimplifying the second argument to va_start, which needs to
4077 be the plain PARM_DECL. */
4078 if ((i != 1) || !builtin_va_start_p)
4080 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
4081 EXPR_LOCATION (*expr_p), ! returns_twice);
4083 if (t == GS_ERROR)
4084 ret = GS_ERROR;
4089 /* Gimplify the static chain. */
4090 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
4092 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
4093 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
4094 else
4096 enum gimplify_status t;
4097 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
4098 EXPR_LOCATION (*expr_p), ! returns_twice);
4099 if (t == GS_ERROR)
4100 ret = GS_ERROR;
4104 /* Verify the function result. */
4105 if (want_value && fndecl
4106 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
4108 error_at (loc, "using result of function returning %<void%>");
4109 ret = GS_ERROR;
4112 /* Try this again in case gimplification exposed something. */
4113 if (ret != GS_ERROR)
4115 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
4117 if (new_tree && new_tree != *expr_p)
4119 /* There was a transformation of this call which computes the
4120 same value, but in a more efficient way. Return and try
4121 again. */
4122 *expr_p = new_tree;
4123 return GS_OK;
4126 else
4128 *expr_p = error_mark_node;
4129 return GS_ERROR;
4132 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
4133 call. This allows us to eliminate redundant or useless
4134 calls to "const" functions. */
4135 if (TREE_CODE (*expr_p) == CALL_EXPR)
4137 int flags = call_expr_flags (*expr_p);
4138 if (flags & (ECF_CONST | ECF_PURE)
4139 /* An infinite loop is considered a side effect. */
4140 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
4141 TREE_SIDE_EFFECTS (*expr_p) = 0;
4144 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
4145 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
4146 form and delegate the creation of a GIMPLE_CALL to
4147 gimplify_modify_expr. This is always possible because when
4148 WANT_VALUE is true, the caller wants the result of this call into
4149 a temporary, which means that we will emit an INIT_EXPR in
4150 internal_get_tmp_var which will then be handled by
4151 gimplify_modify_expr. */
4152 if (!want_value)
4154 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
4155 have to do is replicate it as a GIMPLE_CALL tuple. */
4156 gimple_stmt_iterator gsi;
4157 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
4158 notice_special_calls (call);
4159 gimplify_seq_add_stmt (pre_p, call);
4160 gsi = gsi_last (*pre_p);
4161 maybe_fold_stmt (&gsi);
4162 *expr_p = NULL_TREE;
4164 else
4165 /* Remember the original function type. */
4166 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
4167 CALL_EXPR_FN (*expr_p));
4169 return ret;
4172 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
4173 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
4175 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
4176 condition is true or false, respectively. If null, we should generate
4177 our own to skip over the evaluation of this specific expression.
4179 LOCUS is the source location of the COND_EXPR.
4181 CONDITION_UID is a discriminator tag for condition coverage, used to map
4182 basic conditions to their corresponding full Boolean function.
4184 This function is the tree equivalent of do_jump.
4186 shortcut_cond_r should only be called by shortcut_cond_expr. */
4188 static tree
4189 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
4190 location_t locus, unsigned condition_uid)
4192 tree local_label = NULL_TREE;
4193 tree t, expr = NULL;
4195 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
4196 retain the shortcut semantics. Just insert the gotos here;
4197 shortcut_cond_expr will append the real blocks later. */
4198 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4200 location_t new_locus;
4202 /* Turn if (a && b) into
4204 if (a); else goto no;
4205 if (b) goto yes; else goto no;
4206 (no:) */
4208 if (false_label_p == NULL)
4209 false_label_p = &local_label;
4211 /* Keep the original source location on the first 'if'. */
4212 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus,
4213 condition_uid);
4214 append_to_statement_list (t, &expr);
4216 /* Set the source location of the && on the second 'if'. */
4217 new_locus = rexpr_location (pred, locus);
4218 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4219 new_locus, condition_uid);
4220 append_to_statement_list (t, &expr);
4222 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4224 location_t new_locus;
4226 /* Turn if (a || b) into
4228 if (a) goto yes;
4229 if (b) goto yes; else goto no;
4230 (yes:) */
4232 if (true_label_p == NULL)
4233 true_label_p = &local_label;
4235 /* Keep the original source location on the first 'if'. */
4236 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus,
4237 condition_uid);
4238 append_to_statement_list (t, &expr);
4240 /* Set the source location of the || on the second 'if'. */
4241 new_locus = rexpr_location (pred, locus);
4242 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
4243 new_locus, condition_uid);
4244 append_to_statement_list (t, &expr);
4246 else if (TREE_CODE (pred) == COND_EXPR
4247 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
4248 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
4250 location_t new_locus;
4252 /* As long as we're messing with gotos, turn if (a ? b : c) into
4253 if (a)
4254 if (b) goto yes; else goto no;
4255 else
4256 if (c) goto yes; else goto no;
4258 Don't do this if one of the arms has void type, which can happen
4259 in C++ when the arm is a throw. */
4261 /* Keep the original source location on the first 'if'. Set the source
4262 location of the ? on the second 'if'. */
4263 new_locus = rexpr_location (pred, locus);
4264 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
4265 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
4266 false_label_p, locus, condition_uid),
4267 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
4268 false_label_p, new_locus,
4269 condition_uid));
4270 SET_EXPR_UID (expr, condition_uid);
4272 else
4274 expr = build3 (COND_EXPR, void_type_node, pred,
4275 build_and_jump (true_label_p),
4276 build_and_jump (false_label_p));
4277 SET_EXPR_LOCATION (expr, locus);
4278 SET_EXPR_UID (expr, condition_uid);
4281 if (local_label)
4283 t = build1 (LABEL_EXPR, void_type_node, local_label);
4284 append_to_statement_list (t, &expr);
4287 return expr;
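/* Illustrative sketch (an added example): for the predicate
   'a && (b || c)' with jump targets yes/no, the rules above combine into
   roughly

     if (a) ; else goto no;
     if (b) goto yes;
     if (c) goto yes; else goto no;  */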
4290 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
4291 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
4292 statement, if it is the last one. Otherwise, return NULL. */
4294 static tree
4295 find_goto (tree expr)
4297 if (!expr)
4298 return NULL_TREE;
4300 if (TREE_CODE (expr) == GOTO_EXPR)
4301 return expr;
4303 if (TREE_CODE (expr) != STATEMENT_LIST)
4304 return NULL_TREE;
4306 tree_stmt_iterator i = tsi_start (expr);
4308 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4309 tsi_next (&i);
4311 if (!tsi_one_before_end_p (i))
4312 return NULL_TREE;
4314 return find_goto (tsi_stmt (i));
4317 /* Same as find_goto, except that it returns NULL if the destination
4318 is not a LABEL_DECL. */
4320 static inline tree
4321 find_goto_label (tree expr)
4323 tree dest = find_goto (expr);
4324 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4325 return dest;
4326 return NULL_TREE;
4330 /* Given a multi-term condition (ANDIF, ORIF), walk the predicate PRED and tag
4331 every basic condition with CONDITION_UID. Two basic conditions share the
4332 CONDITION_UID discriminator when they belong to the same predicate, which is
4333 used by condition coverage. Doing this as an explicit step makes for a
4334 simpler implementation than weaving it into the splitting code, as the
4335 splitting code eventually calls the entry point gimplify_expr, which makes
4336 bookkeeping complicated. */
4337 static void
4338 tag_shortcut_cond (tree pred, unsigned condition_uid)
4340 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR
4341 || TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4343 tree fst = TREE_OPERAND (pred, 0);
4344 tree lst = TREE_OPERAND (pred, 1);
4346 if (TREE_CODE (fst) == TRUTH_ANDIF_EXPR
4347 || TREE_CODE (fst) == TRUTH_ORIF_EXPR)
4348 tag_shortcut_cond (fst, condition_uid);
4349 else if (TREE_CODE (fst) == COND_EXPR)
4350 SET_EXPR_UID (fst, condition_uid);
4352 if (TREE_CODE (lst) == TRUTH_ANDIF_EXPR
4353 || TREE_CODE (lst) == TRUTH_ORIF_EXPR)
4354 tag_shortcut_cond (lst, condition_uid);
4355 else if (TREE_CODE (lst) == COND_EXPR)
4356 SET_EXPR_UID (lst, condition_uid);
4360 /* Given a conditional expression EXPR with short-circuit boolean
4361 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4362 predicate apart into the equivalent sequence of conditionals. CONDITION_UID
4363 is the tag/discriminator for this EXPR - all basic conditions in the
4364 expression will be given the same CONDITION_UID. */
4365 static tree
4366 shortcut_cond_expr (tree expr, unsigned condition_uid)
4368 tree pred = TREE_OPERAND (expr, 0);
4369 tree then_ = TREE_OPERAND (expr, 1);
4370 tree else_ = TREE_OPERAND (expr, 2);
4371 tree true_label, false_label, end_label, t;
4372 tree *true_label_p;
4373 tree *false_label_p;
4374 bool emit_end, emit_false, jump_over_else;
4375 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4376 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4378 tag_shortcut_cond (pred, condition_uid);
4380 /* First do simple transformations. */
4381 if (!else_se)
4383 /* If there is no 'else', turn
4384 if (a && b) then c
4385 into
4386 if (a) if (b) then c. */
4387 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4389 /* Keep the original source location on the first 'if'. */
4390 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4391 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4392 /* Set the source location of the && on the second 'if'. */
4393 if (rexpr_has_location (pred))
4394 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4395 then_ = shortcut_cond_expr (expr, condition_uid);
4396 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4397 pred = TREE_OPERAND (pred, 0);
4398 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4399 SET_EXPR_LOCATION (expr, locus);
4403 if (!then_se)
4405 /* If there is no 'then', turn
4406 if (a || b); else d
4407 into
4408 if (a); else if (b); else d. */
4409 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4411 /* Keep the original source location on the first 'if'. */
4412 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4413 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4414 /* Set the source location of the || on the second 'if'. */
4415 if (rexpr_has_location (pred))
4416 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4417 else_ = shortcut_cond_expr (expr, condition_uid);
4418 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4419 pred = TREE_OPERAND (pred, 0);
4420 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4421 SET_EXPR_LOCATION (expr, locus);
4425 /* The expr tree should also have the expression id set. */
4426 SET_EXPR_UID (expr, condition_uid);
4428 /* If we're done, great. */
4429 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4430 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4431 return expr;
4433 /* Otherwise we need to mess with gotos. Change
4434 if (a) c; else d;
4436 if (a); else goto no;
4437 c; goto end;
4438 no: d; end:
4439 and recursively gimplify the condition. */
4441 true_label = false_label = end_label = NULL_TREE;
4443 /* If our arms just jump somewhere, hijack those labels so we don't
4444 generate jumps to jumps. */
4446 if (tree then_goto = find_goto_label (then_))
4448 true_label = GOTO_DESTINATION (then_goto);
4449 then_ = NULL;
4450 then_se = false;
4453 if (tree else_goto = find_goto_label (else_))
4455 false_label = GOTO_DESTINATION (else_goto);
4456 else_ = NULL;
4457 else_se = false;
4460 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4461 if (true_label)
4462 true_label_p = &true_label;
4463 else
4464 true_label_p = NULL;
4466 /* The 'else' branch also needs a label if it contains interesting code. */
4467 if (false_label || else_se)
4468 false_label_p = &false_label;
4469 else
4470 false_label_p = NULL;
4472 /* If there was nothing else in our arms, just forward the label(s). */
4473 if (!then_se && !else_se)
4474 return shortcut_cond_r (pred, true_label_p, false_label_p,
4475 EXPR_LOC_OR_LOC (expr, input_location), condition_uid);
4477 /* If our last subexpression already has a terminal label, reuse it. */
4478 if (else_se)
4479 t = expr_last (else_);
4480 else if (then_se)
4481 t = expr_last (then_);
4482 else
4483 t = NULL;
4484 if (t && TREE_CODE (t) == LABEL_EXPR)
4485 end_label = LABEL_EXPR_LABEL (t);
4487 /* If we don't care about jumping to the 'else' branch, jump to the end
4488 if the condition is false. */
4489 if (!false_label_p)
4490 false_label_p = &end_label;
4492 /* We only want to emit these labels if we aren't hijacking them. */
4493 emit_end = (end_label == NULL_TREE);
4494 emit_false = (false_label == NULL_TREE);
4496 /* We only emit the jump over the else clause if we have to--if the
4497 then clause may fall through. Otherwise we can wind up with a
4498 useless jump and a useless label at the end of gimplified code,
4499 which will cause us to think that this conditional as a whole
4500 falls through even if it doesn't. If we then inline a function
4501 which ends with such a condition, that can cause us to issue an
4502 inappropriate warning about control reaching the end of a
4503 non-void function. */
4504 jump_over_else = block_may_fallthru (then_);
4506 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4507 EXPR_LOC_OR_LOC (expr, input_location),
4508 condition_uid);
4510 expr = NULL;
4511 append_to_statement_list (pred, &expr);
4513 append_to_statement_list (then_, &expr);
4514 if (else_se)
4516 if (jump_over_else)
4518 tree last = expr_last (expr);
4519 t = build_and_jump (&end_label);
4520 if (rexpr_has_location (last))
4521 SET_EXPR_LOCATION (t, rexpr_location (last));
4522 append_to_statement_list (t, &expr);
4524 if (emit_false)
4526 t = build1 (LABEL_EXPR, void_type_node, false_label);
4527 append_to_statement_list (t, &expr);
4529 append_to_statement_list (else_, &expr);
4531 if (emit_end && end_label)
4533 t = build1 (LABEL_EXPR, void_type_node, end_label);
4534 append_to_statement_list (t, &expr);
4537 return expr;
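/* Illustrative sketch (an added example): 'if (a && b) x = 1; else x = 2;'
   roughly expands to

     if (a) ; else goto no;
     if (b) ; else goto no;
     x = 1;
     goto end;
   no:
     x = 2;
   end:

   with the labels freshly created, or hijacked from existing gotos as
   described above.  */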
4540 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4542 tree
4543 gimple_boolify (tree expr)
4545 tree type = TREE_TYPE (expr);
4546 location_t loc = EXPR_LOCATION (expr);
4548 if (TREE_CODE (expr) == NE_EXPR
4549 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4550 && integer_zerop (TREE_OPERAND (expr, 1)))
4552 tree call = TREE_OPERAND (expr, 0);
4553 tree fn = get_callee_fndecl (call);
4555 /* For __builtin_expect ((long) (x), y) recurse into x as well
4556 if x is truth_value_p. */
4557 if (fn
4558 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4559 && call_expr_nargs (call) == 2)
4561 tree arg = CALL_EXPR_ARG (call, 0);
4562 if (arg)
4564 if (TREE_CODE (arg) == NOP_EXPR
4565 && TREE_TYPE (arg) == TREE_TYPE (call))
4566 arg = TREE_OPERAND (arg, 0);
4567 if (truth_value_p (TREE_CODE (arg)))
4569 arg = gimple_boolify (arg);
4570 CALL_EXPR_ARG (call, 0)
4571 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4577 switch (TREE_CODE (expr))
4579 case TRUTH_AND_EXPR:
4580 case TRUTH_OR_EXPR:
4581 case TRUTH_XOR_EXPR:
4582 case TRUTH_ANDIF_EXPR:
4583 case TRUTH_ORIF_EXPR:
4584 /* Also boolify the arguments of truth exprs. */
4585 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4586 /* FALLTHRU */
4588 case TRUTH_NOT_EXPR:
4589 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4591 /* These expressions always produce boolean results. */
4592 if (TREE_CODE (type) != BOOLEAN_TYPE)
4593 TREE_TYPE (expr) = boolean_type_node;
4594 return expr;
4596 case ANNOTATE_EXPR:
4597 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4599 case annot_expr_ivdep_kind:
4600 case annot_expr_unroll_kind:
4601 case annot_expr_no_vector_kind:
4602 case annot_expr_vector_kind:
4603 case annot_expr_parallel_kind:
4604 case annot_expr_maybe_infinite_kind:
4605 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4606 if (TREE_CODE (type) != BOOLEAN_TYPE)
4607 TREE_TYPE (expr) = boolean_type_node;
4608 return expr;
4609 default:
4610 gcc_unreachable ();
4613 default:
4614 if (COMPARISON_CLASS_P (expr))
4616 /* These expressions always produce boolean results. */
4617 if (TREE_CODE (type) != BOOLEAN_TYPE)
4618 TREE_TYPE (expr) = boolean_type_node;
4619 return expr;
4621 /* Other expressions that get here must have boolean values, but
4622 might need to be converted to the appropriate mode. */
4623 if (TREE_CODE (type) == BOOLEAN_TYPE)
4624 return expr;
4625 return fold_convert_loc (loc, boolean_type_node, expr);
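/* Illustrative sketch (an added example): boolifying 'a & b' for int
   operands leaves the bitwise operation alone and converts the result,
   roughly '(bool) (a & b)', whereas 'a && b' has both operands boolified
   recursively and the TRUTH_ANDIF_EXPR itself simply retyped to
   boolean.  */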
4629 /* Given a conditional expression *EXPR_P without side effects, gimplify
4630 its operands. New statements are inserted to PRE_P. */
4632 static enum gimplify_status
4633 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4635 tree expr = *expr_p, cond;
4636 enum gimplify_status ret, tret;
4637 enum tree_code code;
4639 cond = gimple_boolify (COND_EXPR_COND (expr));
4641 /* We need to handle && and || specially, as their gimplification
4642 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
4643 code = TREE_CODE (cond);
4644 if (code == TRUTH_ANDIF_EXPR)
4645 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4646 else if (code == TRUTH_ORIF_EXPR)
4647 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4648 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4649 COND_EXPR_COND (*expr_p) = cond;
4651 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4652 is_gimple_val, fb_rvalue);
4653 ret = MIN (ret, tret);
4654 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4655 is_gimple_val, fb_rvalue);
4657 return MIN (ret, tret);
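/* Illustrative sketch (an added example): with side-effect-free,
   non-trapping arms, 'x = p ? a + 1 : b' can keep the conditional on the
   RHS, roughly

     tmp = a + 1;
     x = p ? tmp : b;

   evaluating both arms unconditionally instead of emitting control
   flow.  */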
4660 /* Return true if evaluating EXPR could trap.
4661 EXPR is GENERIC, while tree_could_trap_p can be called
4662 only on GIMPLE. */
4664 bool
4665 generic_expr_could_trap_p (tree expr)
4667 unsigned i, n;
4669 if (!expr || is_gimple_val (expr))
4670 return false;
4672 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4673 return true;
4675 n = TREE_OPERAND_LENGTH (expr);
4676 for (i = 0; i < n; i++)
4677 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4678 return true;
4680 return false;
4683 /* Associate the condition STMT with the discriminator UID. STMTs that are
4684 broken down with ANDIF/ORIF from the same Boolean expression should be given
4685 the same UID; 'if (a && b && c) { if (d || e) ... } ...' should yield the
4686 mapping { a: 1, b: 1, c: 1, d: 2, e: 2 } when gimplification is done. This
4687 is used for condition coverage. */
4688 static void
4689 gimple_associate_condition_with_expr (struct function *fn, gcond *stmt,
4690 unsigned uid)
4692 if (!condition_coverage_flag)
4693 return;
4695 if (!fn->cond_uids)
4696 fn->cond_uids = new hash_map <gcond*, unsigned> ();
4698 fn->cond_uids->put (stmt, uid);
4701 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4702 into
4704 if (p) if (p)
4705 t1 = a; a;
4706 else or else
4707 t1 = b; b;
4710 The second form is used when *EXPR_P is of type void.
4712 PRE_P points to the list where side effects that must happen before
4713 *EXPR_P should be stored. */
4715 static enum gimplify_status
4716 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4718 tree expr = *expr_p;
4719 tree type = TREE_TYPE (expr);
4720 location_t loc = EXPR_LOCATION (expr);
4721 tree tmp, arm1, arm2;
4722 enum gimplify_status ret;
4723 tree label_true, label_false, label_cont;
4724 bool have_then_clause_p, have_else_clause_p;
4725 gcond *cond_stmt;
4726 enum tree_code pred_code;
4727 gimple_seq seq = NULL;
4729 /* If this COND_EXPR has a value, copy the values into a temporary within
4730 the arms. */
4731 if (!VOID_TYPE_P (type))
4733 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4734 tree result;
4736 /* If either an rvalue is ok or we do not require an lvalue, create the
4737 temporary. But we cannot do that if the type is addressable. */
4738 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4739 && !TREE_ADDRESSABLE (type))
4741 if (gimplify_ctxp->allow_rhs_cond_expr
4742 /* If either branch has side effects or could trap, it can't be
4743 evaluated unconditionally. */
4744 && !TREE_SIDE_EFFECTS (then_)
4745 && !generic_expr_could_trap_p (then_)
4746 && !TREE_SIDE_EFFECTS (else_)
4747 && !generic_expr_could_trap_p (else_))
4748 return gimplify_pure_cond_expr (expr_p, pre_p);
4750 tmp = create_tmp_var (type, "iftmp");
4751 result = tmp;
4754 /* Otherwise, only create and copy references to the values. */
4755 else
4757 type = build_pointer_type (type);
4759 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4760 then_ = build_fold_addr_expr_loc (loc, then_);
4762 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4763 else_ = build_fold_addr_expr_loc (loc, else_);
4765 expr
4766 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4768 tmp = create_tmp_var (type, "iftmp");
4769 result = build_simple_mem_ref_loc (loc, tmp);
4772 /* Build the new then clause, `tmp = then_;'. But don't build the
4773 assignment if the value is void; in C++ it can be when the arm is a throw. */
4774 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4775 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4777 /* Similarly, build the new else clause, `tmp = else_;'. */
4778 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4779 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4781 TREE_TYPE (expr) = void_type_node;
4782 recalculate_side_effects (expr);
4784 /* Move the COND_EXPR to the prequeue. */
4785 gimplify_stmt (&expr, pre_p);
4787 *expr_p = result;
4788 return GS_ALL_DONE;
4791 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4792 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4793 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4794 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4796 /* Make sure the condition has BOOLEAN_TYPE. */
4797 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4799 /* Break apart && and || conditions. */
4800 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4801 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4803 expr = shortcut_cond_expr (expr, next_cond_uid ());
4805 if (expr != *expr_p)
4807 *expr_p = expr;
4809 /* We can't rely on gimplify_expr to re-gimplify the expanded
4810 form properly, as cleanups might cause the target labels to be
4811 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4812 set up a conditional context. */
4813 gimple_push_condition ();
4814 gimplify_stmt (expr_p, &seq);
4815 gimple_pop_condition (pre_p);
4816 gimple_seq_add_seq (pre_p, seq);
4818 return GS_ALL_DONE;
4822 /* Now do the normal gimplification. */
4824 /* Gimplify condition. */
4825 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4826 is_gimple_condexpr_for_cond, fb_rvalue);
4827 if (ret == GS_ERROR)
4828 return GS_ERROR;
4829 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4831 gimple_push_condition ();
4833 have_then_clause_p = have_else_clause_p = false;
4834 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4835 if (label_true
4836 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4837 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4838 have different locations, otherwise we end up with incorrect
4839 location information on the branches. */
4840 && (optimize
4841 || !EXPR_HAS_LOCATION (expr)
4842 || !rexpr_has_location (label_true)
4843 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4845 have_then_clause_p = true;
4846 label_true = GOTO_DESTINATION (label_true);
4848 else
4849 label_true = create_artificial_label (UNKNOWN_LOCATION);
4850 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4851 if (label_false
4852 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4853 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4854 have different locations, otherwise we end up with incorrect
4855 location information on the branches. */
4856 && (optimize
4857 || !EXPR_HAS_LOCATION (expr)
4858 || !rexpr_has_location (label_false)
4859 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4861 have_else_clause_p = true;
4862 label_false = GOTO_DESTINATION (label_false);
4864 else
4865 label_false = create_artificial_label (UNKNOWN_LOCATION);
4867 unsigned cond_uid = EXPR_COND_UID (expr);
4868 if (cond_uid == 0)
4869 cond_uid = next_cond_uid ();
4871 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4872 &arm2);
4873 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4874 label_false);
4875 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4876 gimple_associate_condition_with_expr (cfun, cond_stmt, cond_uid);
4877 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4878 gimplify_seq_add_stmt (&seq, cond_stmt);
4879 gimple_stmt_iterator gsi = gsi_last (seq);
4880 maybe_fold_stmt (&gsi);
4882 label_cont = NULL_TREE;
4883 if (!have_then_clause_p)
4885 /* For if (...) {} else { code; } put label_true after
4886 the else block. */
4887 if (TREE_OPERAND (expr, 1) == NULL_TREE
4888 && !have_else_clause_p
4889 && TREE_OPERAND (expr, 2) != NULL_TREE)
4891 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4892 handling that label_cont == label_true can be only reached
4893 through fallthrough from { code; }. */
4894 if (integer_zerop (COND_EXPR_COND (expr)))
4895 UNUSED_LABEL_P (label_true) = 1;
4896 label_cont = label_true;
4898 else
4900 bool then_side_effects
4901 = (TREE_OPERAND (expr, 1)
4902 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4903 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4904 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4905 /* For if (...) { code; } else {} or
4906 if (...) { code; } else goto label; or
4907 if (...) { code; return; } else { ... }
4908 label_cont isn't needed. */
4909 if (!have_else_clause_p
4910 && TREE_OPERAND (expr, 2) != NULL_TREE
4911 && gimple_seq_may_fallthru (seq))
4913 gimple *g;
4914 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4916 /* For if (0) { non-side-effect-code } else { code }
4917 tell -Wimplicit-fallthrough handling that label_cont can
4918 be only reached through fallthrough from { code }. */
4919 if (integer_zerop (COND_EXPR_COND (expr)))
4921 UNUSED_LABEL_P (label_true) = 1;
4922 if (!then_side_effects)
4923 UNUSED_LABEL_P (label_cont) = 1;
4926 g = gimple_build_goto (label_cont);
4928 /* GIMPLE_COND's are very low level; they have embedded
4929 gotos. This particular embedded goto should not be marked
4930 with the location of the original COND_EXPR, as it would
4931 correspond to the COND_EXPR's condition, not the ELSE or the
4932 THEN arms. To avoid marking it with the wrong location, flag
4933 it as "no location". */
4934 gimple_set_do_not_emit_location (g);
4936 gimplify_seq_add_stmt (&seq, g);
4940 if (!have_else_clause_p)
4942 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4943 tell -Wimplicit-fallthrough handling that label_false can be only
4944 reached through fallthrough from { code }. */
4945 if (integer_nonzerop (COND_EXPR_COND (expr))
4946 && (TREE_OPERAND (expr, 2) == NULL_TREE
4947 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4948 UNUSED_LABEL_P (label_false) = 1;
4949 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4950 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4952 if (label_cont)
4953 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4955 gimple_pop_condition (pre_p);
4956 gimple_seq_add_seq (pre_p, seq);
4958 if (ret == GS_ERROR)
4959 ; /* Do nothing. */
4960 else if (have_then_clause_p || have_else_clause_p)
4961 ret = GS_ALL_DONE;
4962 else
4964 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4965 expr = TREE_OPERAND (expr, 0);
4966 gimplify_stmt (&expr, pre_p);
4969 *expr_p = NULL;
4970 return ret;
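/* Illustrative sketch (an added example): the valued conditional
   'x = p ? f () : g ()' is rewritten with an 'iftmp' temporary and then
   gimplified roughly as

     if (p) goto T; else goto F;
   T:
     iftmp = f ();
     goto end;
   F:
     iftmp = g ();
   end:
     x = iftmp;

   matching the first form documented above gimplify_cond_expr.  */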
4973 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4974 to be marked addressable.
4976 We cannot rely on such an expression being directly markable if a temporary
4977 has been created by the gimplification. In this case, we create another
4978 temporary and initialize it with a copy, which will become a store after we
4979 mark it addressable. This can happen if the front-end passed us something
4980 that it could not mark addressable yet, like a Fortran pass-by-reference
4981 parameter (int) floatvar. */
4983 static void
4984 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4986 while (handled_component_p (*expr_p))
4987 expr_p = &TREE_OPERAND (*expr_p, 0);
4989 /* Do not allow an SSA name as the temporary. */
4990 if (is_gimple_reg (*expr_p))
4991 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4994 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4995 a call to __builtin_memcpy. */
4997 static enum gimplify_status
4998 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4999 gimple_seq *seq_p)
5001 tree t, to, to_ptr, from, from_ptr;
5002 gcall *gs;
5003 location_t loc = EXPR_LOCATION (*expr_p);
5005 to = TREE_OPERAND (*expr_p, 0);
5006 from = TREE_OPERAND (*expr_p, 1);
5007 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to)))
5008 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))));
5010 /* Mark the RHS addressable. Beware that it may not be possible to do so
5011 directly if a temporary has been created by the gimplification. */
5012 prepare_gimple_addressable (&from, seq_p);
5014 mark_addressable (from);
5015 from_ptr = build_fold_addr_expr_loc (loc, from);
5016 gimplify_arg (&from_ptr, seq_p, loc);
5018 mark_addressable (to);
5019 to_ptr = build_fold_addr_expr_loc (loc, to);
5020 gimplify_arg (&to_ptr, seq_p, loc);
5022 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
5024 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
5025 gimple_call_set_alloca_for_var (gs, true);
5027 if (want_value)
5029 /* tmp = memcpy() */
5030 t = create_tmp_var (TREE_TYPE (to_ptr));
5031 gimple_call_set_lhs (gs, t);
5032 gimplify_seq_add_stmt (seq_p, gs);
5034 *expr_p = build_simple_mem_ref (t);
5035 return GS_ALL_DONE;
5038 gimplify_seq_add_stmt (seq_p, gs);
5039 *expr_p = NULL;
5040 return GS_ALL_DONE;
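/* Illustrative sketch (an added example): an assignment 'a = b' of a
   variable-sized type, whose size was attached by maybe_with_size_expr,
   becomes roughly

     __builtin_memcpy (&a, &b, size);

   and when WANT_VALUE is set the call's result pointer is dereferenced to
   recover the value of the assignment.  */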
5043 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
5044 a call to __builtin_memset. In this case we know that the RHS is
5045 a CONSTRUCTOR with an empty element list. */
5047 static enum gimplify_status
5048 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
5049 gimple_seq *seq_p)
5051 tree t, from, to, to_ptr;
5052 gcall *gs;
5053 location_t loc = EXPR_LOCATION (*expr_p);
5055 /* Assert our assumptions, to abort instead of producing wrong code
5056 silently if they are not met. Beware that the RHS CONSTRUCTOR might
5057 not be immediately exposed. */
5058 from = TREE_OPERAND (*expr_p, 1);
5059 if (TREE_CODE (from) == WITH_SIZE_EXPR)
5060 from = TREE_OPERAND (from, 0);
5062 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
5063 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
5065 /* Now proceed. */
5066 to = TREE_OPERAND (*expr_p, 0);
5067 gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to))));
5069 to_ptr = build_fold_addr_expr_loc (loc, to);
5070 gimplify_arg (&to_ptr, seq_p, loc);
5071 t = builtin_decl_implicit (BUILT_IN_MEMSET);
5073 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
5075 if (want_value)
5077 /* tmp = memset() */
5078 t = create_tmp_var (TREE_TYPE (to_ptr));
5079 gimple_call_set_lhs (gs, t);
5080 gimplify_seq_add_stmt (seq_p, gs);
5082 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
5083 return GS_ALL_DONE;
5086 gimplify_seq_add_stmt (seq_p, gs);
5087 *expr_p = NULL;
5088 return GS_ALL_DONE;
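/* Illustrative sketch (an added example): clearing an object with an
   empty CONSTRUCTOR, e.g. 'x = (struct S){ };' when this path is chosen,
   is rewritten roughly as

     __builtin_memset (&x, 0, size);

   with SIZE supplied by the caller.  */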
5091 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
5092 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
5093 assignment. Return non-null if we detect a potential overlap. */
5095 struct gimplify_init_ctor_preeval_data
5097 /* The base decl of the lhs object. May be NULL, in which case we
5098 have to assume the lhs is indirect. */
5099 tree lhs_base_decl;
5101 /* The alias set of the lhs object. */
5102 alias_set_type lhs_alias_set;
5105 static tree
5106 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
5108 struct gimplify_init_ctor_preeval_data *data
5109 = (struct gimplify_init_ctor_preeval_data *) xdata;
5110 tree t = *tp;
5112 /* If we find the base object, obviously we have overlap. */
5113 if (data->lhs_base_decl == t)
5114 return t;
5116 /* If the constructor component is indirect, determine if we have a
5117 potential overlap with the lhs. The only bits of information we
5118 have to go on at this point are addressability and alias sets. */
5119 if ((INDIRECT_REF_P (t)
5120 || TREE_CODE (t) == MEM_REF)
5121 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5122 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
5123 return t;
5125 /* If the constructor component is a call, determine if it can hide a
5126 potential overlap with the lhs through an INDIRECT_REF like above.
5127 ??? Ugh - this is completely broken. In fact this whole analysis
5128 doesn't look conservative. */
5129 if (TREE_CODE (t) == CALL_EXPR)
5131 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
5133 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
5134 if (POINTER_TYPE_P (TREE_VALUE (type))
5135 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5136 && alias_sets_conflict_p (data->lhs_alias_set,
5137 get_alias_set
5138 (TREE_TYPE (TREE_VALUE (type)))))
5139 return t;
5142 if (IS_TYPE_OR_DECL_P (t))
5143 *walk_subtrees = 0;
5144 return NULL;
5147 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5148 force values that overlap with the lhs (as described by *DATA)
5149 into temporaries. */
5151 static void
5152 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5153 struct gimplify_init_ctor_preeval_data *data)
5155 enum gimplify_status one;
5157 /* If the value is constant, then there's nothing to pre-evaluate. */
5158 if (TREE_CONSTANT (*expr_p))
5160 /* Ensure it does not have side effects, it might contain a reference to
5161 the object we're initializing. */
5162 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
5163 return;
5166 /* If the type has non-trivial constructors, we can't pre-evaluate. */
5167 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
5168 return;
5170 /* Recurse for nested constructors. */
5171 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
5173 unsigned HOST_WIDE_INT ix;
5174 constructor_elt *ce;
5175 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
5177 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
5178 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
5180 return;
5183 /* If this is a variable sized type, we must remember the size. */
5184 maybe_with_size_expr (expr_p);
5186 /* Gimplify the constructor element to something appropriate for the rhs
5187 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
5188 the gimplifier will consider this a store to memory. Doing this
5189 gimplification now means that we won't have to deal with complicated
5190 language-specific trees, nor trees like SAVE_EXPR that can induce
5191 exponential search behavior. */
5192 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
5193 if (one == GS_ERROR)
5195 *expr_p = NULL;
5196 return;
5199 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
5200 with the lhs, since "a = { .x=a }" doesn't make sense. This will
5201 always be true for all scalars, since is_gimple_mem_rhs insists on a
5202 temporary variable for them. */
5203 if (DECL_P (*expr_p))
5204 return;
5206 /* If this is of variable size, we have no choice but to assume it doesn't
5207 overlap since we can't make a temporary for it. */
5208 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
5209 return;
5211 /* Otherwise, we must search for overlap ... */
5212 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
5213 return;
5215 /* ... and if found, force the value into a temporary. */
5216 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
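/* Illustrative sketch (an added example): in
   'a = (struct S){ .x = a.y, .y = 0 };' the value 'a.y' overlaps the
   object being initialized, so it is pre-evaluated, roughly

     tmp = a.y;
     a.x = tmp;
     a.y = 0;

   keeping the value correct even though the stores happen piecewise.  */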
5219 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
5220 a RANGE_EXPR in a CONSTRUCTOR for an array.
5222 var = lower;
5223 loop_entry:
5224 object[var] = value;
5225 if (var == upper)
5226 goto loop_exit;
5227 var = var + 1;
5228 goto loop_entry;
5229 loop_exit:
5231 We increment var _after_ the loop exit check because we might otherwise
5232 fail if upper == TYPE_MAX_VALUE (type for upper).
5234 Note that we never have to deal with SAVE_EXPRs here, because this has
5235 already been taken care of for us, in gimplify_init_ctor_preeval(). */
5237 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
5238 gimple_seq *, bool);
5240 static void
5241 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
5242 tree value, tree array_elt_type,
5243 gimple_seq *pre_p, bool cleared)
5245 tree loop_entry_label, loop_exit_label, fall_thru_label;
5246 tree var, var_type, cref, tmp;
5248 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
5249 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
5250 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
5252 /* Create and initialize the index variable. */
5253 var_type = TREE_TYPE (upper);
5254 var = create_tmp_var (var_type);
5255 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
5257 /* Add the loop entry label. */
5258 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
5260 /* Build the reference. */
5261 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5262 var, NULL_TREE, NULL_TREE);
5264 /* If we are a constructor, just call gimplify_init_ctor_eval to do
5265 the store. Otherwise just assign value to the reference. */
5267 if (TREE_CODE (value) == CONSTRUCTOR)
5268 /* NB we might have to call ourself recursively through
5269 gimplify_init_ctor_eval if the value is a constructor. */
5270 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5271 pre_p, cleared);
5272 else
5274 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
5275 != GS_ERROR)
5276 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
5279 /* We exit the loop when the index var is equal to the upper bound. */
5280 gimplify_seq_add_stmt (pre_p,
5281 gimple_build_cond (EQ_EXPR, var, upper,
5282 loop_exit_label, fall_thru_label));
5284 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
5286 /* Otherwise, increment the index var... */
5287 tmp = build2 (PLUS_EXPR, var_type, var,
5288 fold_convert (var_type, integer_one_node));
5289 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
5291 /* ...and jump back to the loop entry. */
5292 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
5294 /* Add the loop exit label. */
5295 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
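/* Illustrative sketch (an added example): a GNU C range designator such
   as 'int a[100] = { [3 ... 96] = v };' reaches here as a RANGE_EXPR and
   is emitted as the loop above rather than as 94 separate stores.  */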
5298 /* A subroutine of gimplify_init_constructor. Generate individual
5299 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
5300 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
5301 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
5302 zeroed first. */
5304 static void
5305 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
5306 gimple_seq *pre_p, bool cleared)
5308 tree array_elt_type = NULL;
5309 unsigned HOST_WIDE_INT ix;
5310 tree purpose, value;
5312 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
5313 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
5315 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
5317 tree cref;
5319 /* NULL values are created above for gimplification errors. */
5320 if (value == NULL)
5321 continue;
5323 if (cleared && initializer_zerop (value))
5324 continue;
5326 /* ??? Here's to hoping the front end fills in all of the indices,
5327 so we don't have to figure out what's missing ourselves. */
5328 gcc_assert (purpose);
5330 /* Skip zero-sized fields, unless value has side-effects. This can
5331 happen with calls to functions returning an empty type, which
5332 we shouldn't discard. As a number of downstream passes don't
5333 expect sets of empty type fields, we rely on the gimplification of
5334 the MODIFY_EXPR we make below to drop the assignment statement. */
5335 if (!TREE_SIDE_EFFECTS (value)
5336 && TREE_CODE (purpose) == FIELD_DECL
5337 && is_empty_type (TREE_TYPE (purpose)))
5338 continue;
5340 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5341 whole range. */
5342 if (TREE_CODE (purpose) == RANGE_EXPR)
5344 tree lower = TREE_OPERAND (purpose, 0);
5345 tree upper = TREE_OPERAND (purpose, 1);
5347 /* If the lower bound is equal to upper, just treat it as if
5348 upper was the index. */
5349 if (simple_cst_equal (lower, upper))
5350 purpose = upper;
5351 else
5353 gimplify_init_ctor_eval_range (object, lower, upper, value,
5354 array_elt_type, pre_p, cleared);
5355 continue;
5359 if (array_elt_type)
5361 /* Do not use bitsizetype for ARRAY_REF indices. */
5362 if (TYPE_DOMAIN (TREE_TYPE (object)))
5363 purpose
5364 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5365 purpose);
5366 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5367 purpose, NULL_TREE, NULL_TREE);
5369 else
5371 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5372 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5373 unshare_expr (object), purpose, NULL_TREE);
5376 if (TREE_CODE (value) == CONSTRUCTOR
5377 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5378 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5379 pre_p, cleared);
5380 else
5382 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5383 gimplify_and_add (init, pre_p);
5384 ggc_free (init);
5389 /* Return the appropriate RHS predicate for this LHS. */
5391 gimple_predicate
5392 rhs_predicate_for (tree lhs)
5394 if (is_gimple_reg (lhs))
5395 return is_gimple_reg_rhs_or_call;
5396 else
5397 return is_gimple_mem_rhs_or_call;
5400 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5401 before the LHS has been gimplified. */
5403 static gimple_predicate
5404 initial_rhs_predicate_for (tree lhs)
5406 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5407 return is_gimple_reg_rhs_or_call;
5408 else
5409 return is_gimple_mem_rhs_or_call;
5412 /* Gimplify a C99 compound literal expression. This just means adding
5413 the DECL_EXPR before the current statement and using its anonymous
5414 decl instead. */
5416 static enum gimplify_status
5417 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5418 bool (*gimple_test_f) (tree),
5419 fallback_t fallback)
5421 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5422 tree decl = DECL_EXPR_DECL (decl_s);
5423 tree init = DECL_INITIAL (decl);
5424 /* Mark the decl as addressable if the compound literal
5425 expression is addressable now, otherwise it is marked too late
5426 after we gimplify the initialization expression. */
5427 if (TREE_ADDRESSABLE (*expr_p))
5428 TREE_ADDRESSABLE (decl) = 1;
5429 /* Otherwise, if we don't need an lvalue and have a literal directly
5430 substitute it. Check if it matches the gimple predicate, as
5431 otherwise we'd generate a new temporary, and we can as well just
5432 use the decl we already have. */
5433 else if (!TREE_ADDRESSABLE (decl)
5434 && !TREE_THIS_VOLATILE (decl)
5435 && init
5436 && (fallback & fb_lvalue) == 0
5437 && gimple_test_f (init))
5439 *expr_p = init;
5440 return GS_OK;
5443 /* If the decl is not addressable, then it is being used in some
5444 expression or on the right hand side of a statement, and it can
5445 be put into a readonly data section. */
5446 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5447 TREE_READONLY (decl) = 1;
5449 /* This decl isn't mentioned in the enclosing block, so add it to the
5450 list of temps. FIXME it seems a bit of a kludge to say that
5451 anonymous artificial vars aren't pushed, but everything else is. */
5452 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5453 gimple_add_tmp_var (decl);
5455 gimplify_and_add (decl_s, pre_p);
5456 *expr_p = decl;
5457 return GS_OK;
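/* Illustrative sketch (an added example): for 'p = &(struct S){ 1, 2 };'
   the DECL_EXPR of the anonymous compound-literal variable (say D.1234, a
   made-up name) is gimplified ahead of the statement, and the expression
   itself becomes 'p = &D.1234;'.  */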
5460 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5461 return a new CONSTRUCTOR if something changed. */
5463 static tree
5464 optimize_compound_literals_in_ctor (tree orig_ctor)
5466 tree ctor = orig_ctor;
5467 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5468 unsigned int idx, num = vec_safe_length (elts);
5470 for (idx = 0; idx < num; idx++)
5472 tree value = (*elts)[idx].value;
5473 tree newval = value;
5474 if (TREE_CODE (value) == CONSTRUCTOR)
5475 newval = optimize_compound_literals_in_ctor (value);
5476 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5478 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5479 tree decl = DECL_EXPR_DECL (decl_s);
5480 tree init = DECL_INITIAL (decl);
5482 if (!TREE_ADDRESSABLE (value)
5483 && !TREE_ADDRESSABLE (decl)
5484 && init
5485 && TREE_CODE (init) == CONSTRUCTOR)
5486 newval = optimize_compound_literals_in_ctor (init);
5488 if (newval == value)
5489 continue;
5491 if (ctor == orig_ctor)
5493 ctor = copy_node (orig_ctor);
5494 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5495 elts = CONSTRUCTOR_ELTS (ctor);
5497 (*elts)[idx].value = newval;
5499 return ctor;
5502 /* A subroutine of gimplify_modify_expr. Break out elements of a
5503 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5505 Note that we still need to clear any elements that don't have explicit
5506 initializers, so if not all elements are initialized we keep the
5507 original MODIFY_EXPR; we just remove all of the constructor elements.
5509 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5510 GS_ERROR if we would have to create a temporary when gimplifying
5511 this constructor. Otherwise, return GS_OK.
5513 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
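/* A sketch of the lowering (hypothetical type and fields): the
   initialization

     struct S s = { .a = 1, .b = foo () };

   becomes a sequence of element stores, preceded by a clearing of the
   whole object when not every element is covered:

     s = {};           (only if clearing is needed)
     s.a = 1;
     s.b = foo ();  */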
5515 static enum gimplify_status
5516 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5517 bool want_value, bool notify_temp_creation)
5519 tree object, ctor, type;
5520 enum gimplify_status ret;
5521 vec<constructor_elt, va_gc> *elts;
5522 bool cleared = false;
5523 bool is_empty_ctor = false;
5524 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5526 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5528 if (!notify_temp_creation)
5530 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5531 is_gimple_lvalue, fb_lvalue);
5532 if (ret == GS_ERROR)
5533 return ret;
5536 object = TREE_OPERAND (*expr_p, 0);
5537 ctor = TREE_OPERAND (*expr_p, 1)
5538 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5539 type = TREE_TYPE (ctor);
5540 elts = CONSTRUCTOR_ELTS (ctor);
5541 ret = GS_ALL_DONE;
5543 switch (TREE_CODE (type))
5545 case RECORD_TYPE:
5546 case UNION_TYPE:
5547 case QUAL_UNION_TYPE:
5548 case ARRAY_TYPE:
5550 /* Use readonly data for initializers of this or smaller size
5551 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5552 ratio. */
5553 const HOST_WIDE_INT min_unique_size = 64;
5554 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5555 is smaller than this, use readonly data. */
5556 const int unique_nonzero_ratio = 8;
5557 /* True if a single access of the object must be ensured. This is the
5558 case if the target is volatile, the type is non-addressable and more
5559 than one field needs to be assigned. */
5560 const bool ensure_single_access
5561 = TREE_THIS_VOLATILE (object)
5562 && !TREE_ADDRESSABLE (type)
5563 && vec_safe_length (elts) > 1;
5564 struct gimplify_init_ctor_preeval_data preeval_data;
5565 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5566 HOST_WIDE_INT num_unique_nonzero_elements;
5567 int complete_p;
5568 bool valid_const_initializer;
5570 /* Aggregate types must lower constructors to initialization of
5571 individual elements. The exception is that a CONSTRUCTOR node
5572 with no elements indicates zero-initialization of the whole. */
5573 if (vec_safe_is_empty (elts))
5575 if (notify_temp_creation)
5576 return GS_OK;
5578 /* The var will be initialized and so appear on lhs of
5579 assignment, it can't be TREE_READONLY anymore. */
5580 if (VAR_P (object))
5581 TREE_READONLY (object) = 0;
5583 is_empty_ctor = true;
5584 break;
5587 /* Fetch information about the constructor to direct later processing.
5588 We might want to make static versions of it in various cases, and
5589 can only do so if it is known to be a valid constant initializer. */
5590 valid_const_initializer
5591 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5592 &num_unique_nonzero_elements,
5593 &num_ctor_elements, &complete_p);
5595 /* If a const aggregate variable is being initialized, then it
5596 should never be a loss to promote the variable to be static. */
5597 if (valid_const_initializer
5598 && num_nonzero_elements > 1
5599 && TREE_READONLY (object)
5600 && VAR_P (object)
5601 && !DECL_REGISTER (object)
5602 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
5603 || DECL_MERGEABLE (object))
5604 /* For ctors that have many repeated nonzero elements
5605 represented through RANGE_EXPRs, prefer initializing
5606 those through runtime loops over copies of large amounts
5607 of data from readonly data section. */
5608 && (num_unique_nonzero_elements
5609 > num_nonzero_elements / unique_nonzero_ratio
5610 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5611 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5613 if (notify_temp_creation)
5614 return GS_ERROR;
5616 DECL_INITIAL (object) = ctor;
5617 TREE_STATIC (object) = 1;
5618 if (!DECL_NAME (object) || DECL_NAMELESS (object))
5619 DECL_NAME (object) = create_tmp_var_name ("C");
5620 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5622 /* ??? C++ doesn't automatically append a .<number> to the
5623 assembler name, and even when it does, it looks at FE private
5624 data structures to figure out what that number should be,
5625 which are not set for this variable. I suppose this is
5626 important for local statics for inline functions, which aren't
5627 "local" in the object file sense. So in order to get a unique
5628 TU-local symbol, we must invoke the lhd version now. */
5629 lhd_set_decl_assembler_name (object);
5631 *expr_p = NULL_TREE;
5632 break;
5635 /* The var will be initialized and so appear on lhs of
5636 assignment, it can't be TREE_READONLY anymore. */
5637 if (VAR_P (object) && !notify_temp_creation)
5638 TREE_READONLY (object) = 0;
5640 /* If there are "lots" of initialized elements, even discounting
5641 those that are not address constants (and thus *must* be
5642 computed at runtime), then partition the constructor into
5643 constant and non-constant parts. Block copy the constant
5644 parts in, then generate code for the non-constant parts. */
5645 /* TODO. There's code in cp/typeck.cc to do this. */
5647 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5648 /* store_constructor will ignore the clearing of variable-sized
5649 objects. Initializers for such objects must explicitly set
5650 every field that needs to be set. */
5651 cleared = false;
5652 else if (!complete_p)
5653 /* If the constructor isn't complete, clear the whole object
5654 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5656 ??? This ought not to be needed. For any element not present
5657 in the initializer, we should simply set it to zero. Except
5658 we'd need to *find* the elements that are not present, and that
5659 requires trickery to avoid quadratic compile-time behavior in
5660 large cases or excessive memory use in small cases. */
5661 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5662 else if (num_ctor_elements - num_nonzero_elements
5663 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5664 && num_nonzero_elements < num_ctor_elements / 4)
5665 /* If there are "lots" of zeros, it's more efficient to clear
5666 the memory and then set the nonzero elements. */
5667 cleared = true;
5668 else if (ensure_single_access && num_nonzero_elements == 0)
5669 /* If a single access to the target must be ensured and all elements
5670 are zero, then clearing is optimal regardless of their number. */
5671 cleared = true;
5672 /* If the object is small enough to go in registers, and it's
5673 not required to be constructed in memory, clear it first.
5674 That will avoid wasting cycles preserving any padding bits
5675 that might be there, and if there aren't any, the compiler
5676 is smart enough to optimize the clearing out. */
5677 else if (complete_p <= 0
5678 && !TREE_ADDRESSABLE (ctor)
5679 && !TREE_THIS_VOLATILE (object)
5680 && (TYPE_MODE (type) != BLKmode || TYPE_NO_FORCE_BLK (type))
5681 && optimize)
5682 cleared = true;
5683 else
5684 cleared = false;
5686 /* If there are "lots" of initialized elements, and all of them
5687 are valid address constants, then the entire initializer can
5688 be dropped to memory, and then memcpy'd out. Don't do this
5689 for sparse arrays, though, as it's more efficient to follow
5690 the standard CONSTRUCTOR behavior of memset followed by
5691 individual element initialization. Also don't do this for small
5692 all-zero initializers (which aren't big enough to merit
5693 clearing), and don't try to make bitwise copies of
5694 TREE_ADDRESSABLE types. */
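	/* For instance (illustrative, assuming a large constant array):

	     int a[64] = { 1, 2, 3, /* etc. */ };

	   is emitted as a block copy from an anonymous readonly object:

	     static const int C.0[64] = { 1, 2, 3, /* etc. */ };
	     a = C.0;   (expanded as a memcpy-like block move)  */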
5695 if (valid_const_initializer
5696 && complete_p
5697 && !(cleared || num_nonzero_elements == 0)
5698 && !TREE_ADDRESSABLE (type))
5700 HOST_WIDE_INT size = int_size_in_bytes (type);
5701 unsigned int align;
5703 /* ??? We can still get unbounded array types, at least
5704 from the C++ front end. This seems wrong, but attempt
5705 to work around it for now. */
5706 if (size < 0)
5708 size = int_size_in_bytes (TREE_TYPE (object));
5709 if (size >= 0)
5710 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5713 /* Find the maximum alignment we can assume for the object. */
5714 /* ??? Make use of DECL_OFFSET_ALIGN. */
5715 if (DECL_P (object))
5716 align = DECL_ALIGN (object);
5717 else
5718 align = TYPE_ALIGN (type);
5720 /* Do a block move either if the size is so small as to make
5721 each individual move a sub-unit move on average, or if it
5722 is so large as to make individual moves inefficient. */
5723 if (size > 0
5724 && num_nonzero_elements > 1
5725 /* For ctors that have many repeated nonzero elements
5726 represented through RANGE_EXPRs, prefer initializing
5727 those through runtime loops over copies of large amounts
5728 of data from readonly data section. */
5729 && (num_unique_nonzero_elements
5730 > num_nonzero_elements / unique_nonzero_ratio
5731 || size <= min_unique_size)
5732 && (size < num_nonzero_elements
5733 || !can_move_by_pieces (size, align)))
5735 if (notify_temp_creation)
5736 return GS_ERROR;
5738 walk_tree (&ctor, force_labels_r, NULL, NULL);
5739 ctor = tree_output_constant_def (ctor);
5740 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5741 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5742 TREE_OPERAND (*expr_p, 1) = ctor;
5744 /* This is no longer an assignment of a CONSTRUCTOR, but
5745 we still may have processing to do on the LHS. So
5746 pretend we didn't do anything here to let that happen. */
5747 return GS_UNHANDLED;
5751 /* If a single access to the target must be ensured and there are
5752 nonzero elements or the zero elements are not assigned en masse,
5753 initialize the target from a temporary. */
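	/* Illustrative shape of the rewrite (temp is a fresh variable):
	   "object = { elts }" becomes "temp = { elts }; object = temp;"
	   so that the volatile object is written with a single access.  */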
5754 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5756 if (notify_temp_creation)
5757 return GS_ERROR;
5759 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5760 TREE_OPERAND (*expr_p, 0) = temp;
5761 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5762 *expr_p,
5763 build2 (MODIFY_EXPR, void_type_node,
5764 object, temp));
5765 return GS_OK;
5768 if (notify_temp_creation)
5769 return GS_OK;
5771 /* If there are nonzero elements and if needed, pre-evaluate to capture
5772 elements overlapping with the lhs into temporaries. We must do this
5773 before clearing to fetch the values before they are zeroed-out. */
5774 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5776 preeval_data.lhs_base_decl = get_base_address (object);
5777 if (!DECL_P (preeval_data.lhs_base_decl))
5778 preeval_data.lhs_base_decl = NULL;
5779 preeval_data.lhs_alias_set = get_alias_set (object);
5781 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5782 pre_p, post_p, &preeval_data);
5785 bool ctor_has_side_effects_p
5786 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5788 if (cleared)
5790 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5791 Note that we still have to gimplify, in order to handle the
5792 case of variable sized types. Avoid shared tree structures. */
5793 CONSTRUCTOR_ELTS (ctor) = NULL;
5794 TREE_SIDE_EFFECTS (ctor) = 0;
5795 object = unshare_expr (object);
5796 gimplify_stmt (expr_p, pre_p);
5799 /* If we have not block cleared the object, or if there are nonzero
5800 elements in the constructor, or if the constructor has side effects,
5801 add assignments to the individual scalar fields of the object. */
5802 if (!cleared
5803 || num_nonzero_elements > 0
5804 || ctor_has_side_effects_p)
5805 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5807 *expr_p = NULL_TREE;
5809 break;
5811 case COMPLEX_TYPE:
5813 tree r, i;
5815 if (notify_temp_creation)
5816 return GS_OK;
5818 /* Extract the real and imaginary parts out of the ctor. */
5819 gcc_assert (elts->length () == 2);
5820 r = (*elts)[0].value;
5821 i = (*elts)[1].value;
5822 if (r == NULL || i == NULL)
5824 tree zero = build_zero_cst (TREE_TYPE (type));
5825 if (r == NULL)
5826 r = zero;
5827 if (i == NULL)
5828 i = zero;
5831 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5832 represent creation of a complex value. */
5833 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5835 ctor = build_complex (type, r, i);
5836 TREE_OPERAND (*expr_p, 1) = ctor;
5838 else
5840 ctor = build2 (COMPLEX_EXPR, type, r, i);
5841 TREE_OPERAND (*expr_p, 1) = ctor;
5842 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5843 pre_p,
5844 post_p,
5845 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5846 fb_rvalue);
5849 break;
5851 case VECTOR_TYPE:
5853 unsigned HOST_WIDE_INT ix;
5854 constructor_elt *ce;
5856 if (notify_temp_creation)
5857 return GS_OK;
5859 /* Vector types use CONSTRUCTOR all the way through gimple
5860 compilation as a general initializer. */
5861 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5863 enum gimplify_status tret;
5864 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5865 fb_rvalue);
5866 if (tret == GS_ERROR)
5867 ret = GS_ERROR;
5868 else if (TREE_STATIC (ctor)
5869 && !initializer_constant_valid_p (ce->value,
5870 TREE_TYPE (ce->value)))
5871 TREE_STATIC (ctor) = 0;
5873 recompute_constructor_flags (ctor);
5875 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5876 if (TREE_CONSTANT (ctor))
5878 bool constant_p = true;
5879 tree value;
5881 /* Even when ctor is constant, it might contain non-*_CST
5882 elements, such as addresses or trapping values like
5883 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5884 in VECTOR_CST nodes. */
5885 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5886 if (!CONSTANT_CLASS_P (value))
5888 constant_p = false;
5889 break;
5892 if (constant_p)
5894 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5895 break;
5899 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5900 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5902 break;
5904 default:
5905 /* So how did we get a CONSTRUCTOR for a scalar type? */
5906 gcc_unreachable ();
5909 if (ret == GS_ERROR)
5910 return GS_ERROR;
5911 /* If we have gimplified both sides of the initializer but have
5912 not emitted an assignment, do so now. */
5913 if (*expr_p
5914 /* If the type is an empty type, we don't need to emit the
5915 assignment. */
5916 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5918 tree lhs = TREE_OPERAND (*expr_p, 0);
5919 tree rhs = TREE_OPERAND (*expr_p, 1);
5920 if (want_value && object == lhs)
5921 lhs = unshare_expr (lhs);
5922 gassign *init = gimple_build_assign (lhs, rhs);
5923 gimplify_seq_add_stmt (pre_p, init);
5925 if (want_value)
5927 *expr_p = object;
5928 ret = GS_OK;
5930 else
5932 *expr_p = NULL;
5933 ret = GS_ALL_DONE;
5936 /* If the user requests initialization of automatic variables, we
5937 should initialize the padding inside the variable. Add a call to
5938 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5939 always initialize the padding of object to zero regardless of
5940 INIT_TYPE. Note, we will not insert this call if the aggregate
5941 variable has already been completely cleared or it's initialized
5942 with an empty constructor. We cannot insert this call if the
5943 variable is a gimple register since __builtin_clear_padding will take
5944 the address of the variable. As a result, if a long double/_Complex long
5945 double variable is later spilled to the stack, its padding cannot
5946 be cleared with __builtin_clear_padding. We should clear its padding
5947 when it is spilled into memory. */
5948 if (is_init_expr
5949 && !is_gimple_reg (object)
5950 && clear_padding_type_may_have_padding_p (type)
5951 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5952 || !AGGREGATE_TYPE_P (type))
5953 && is_var_need_auto_init (object))
5954 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5956 return ret;
5959 /* Given a pointer value OP0, return a simplified version of an
5960 indirection through OP0, or NULL_TREE if no simplification is
5961 possible. This may only be applied to the rhs of an expression.
5962 Note that the resulting type may be different from the type pointed
5963 to in the sense that it is still compatible from the langhooks
5964 point of view. */
5966 static tree
5967 gimple_fold_indirect_ref_rhs (tree t)
5969 return gimple_fold_indirect_ref (t);
5972 /* Subroutine of gimplify_modify_expr to do simplifications of
5973 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5974 something changes. */
5976 static enum gimplify_status
5977 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5978 gimple_seq *pre_p, gimple_seq *post_p,
5979 bool want_value)
5981 enum gimplify_status ret = GS_UNHANDLED;
5982 bool changed;
5986 changed = false;
5987 switch (TREE_CODE (*from_p))
5989 case VAR_DECL:
5990 /* If we're assigning from a read-only variable initialized with
5991 a constructor and not volatile, do the direct assignment from
5992 the constructor, but only if the target is not volatile either
5993 since this latter assignment might end up being done on a per
5994 field basis. However, if the target is volatile and the type
5995 is aggregate and non-addressable, gimplify_init_constructor
5996 knows that it needs to ensure a single access to the target
5997 and it will return GS_OK only in this case. */
5998 if (TREE_READONLY (*from_p)
5999 && DECL_INITIAL (*from_p)
6000 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
6001 && !TREE_THIS_VOLATILE (*from_p)
6002 && (!TREE_THIS_VOLATILE (*to_p)
6003 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
6004 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
6006 tree old_from = *from_p;
6007 enum gimplify_status subret;
6009 /* Move the constructor into the RHS. */
6010 *from_p = unshare_expr (DECL_INITIAL (*from_p));
6012 /* Let's see if gimplify_init_constructor will need to put
6013 it in memory. */
6014 subret = gimplify_init_constructor (expr_p, NULL, NULL,
6015 false, true);
6016 if (subret == GS_ERROR)
6018 /* If so, revert the change. */
6019 *from_p = old_from;
6021 else
6023 ret = GS_OK;
6024 changed = true;
6027 break;
6028 case INDIRECT_REF:
6029 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
6030 /* If we have code like
6032 *(const A*)(A*)&x
6034 where the type of "x" is a (possibly cv-qualified variant
6035 of "A"), treat the entire expression as identical to "x".
6036 This kind of code arises in C++ when an object is bound
6037 to a const reference, and if "x" is a TARGET_EXPR we want
6038 to take advantage of the optimization below. But not if
6039 the type is TREE_ADDRESSABLE; then C++17 says that the
6040 TARGET_EXPR needs to be a temporary. */
6041 if (tree t
6042 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
6044 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6045 if (TREE_THIS_VOLATILE (t) != volatile_p)
6047 if (DECL_P (t))
6048 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
6049 build_fold_addr_expr (t));
6050 if (REFERENCE_CLASS_P (t))
6051 TREE_THIS_VOLATILE (t) = volatile_p;
6053 *from_p = t;
6054 ret = GS_OK;
6055 changed = true;
6057 break;
6059 case TARGET_EXPR:
6061 /* If we are initializing something from a TARGET_EXPR, strip the
6062 TARGET_EXPR and initialize it directly, if possible. This can't
6063 be done if the initializer is void, since that implies that the
6064 temporary is set in some non-trivial way.
6066 ??? What about code that pulls out the temp and uses it
6067 elsewhere? I think that such code never uses the TARGET_EXPR as
6068 an initializer. If I'm wrong, we'll die because the temp won't
6069 have any RTL. In that case, I guess we'll need to replace
6070 references somehow. */
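      /* E.g. (illustrative): "x = TARGET_EXPR <D.1, foo ()>" is rewritten
	 to "x = foo ()", initializing x directly and eliding the
	 temporary D.1.  */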
6071 tree init = TARGET_EXPR_INITIAL (*from_p);
6073 if (init
6074 && (TREE_CODE (*expr_p) != MODIFY_EXPR
6075 || !TARGET_EXPR_NO_ELIDE (*from_p))
6076 && !VOID_TYPE_P (TREE_TYPE (init)))
6078 *from_p = init;
6079 ret = GS_OK;
6080 changed = true;
6083 break;
6085 case COMPOUND_EXPR:
6086 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
6087 caught. */
6088 gimplify_compound_expr (from_p, pre_p, true);
6089 ret = GS_OK;
6090 changed = true;
6091 break;
6093 case CONSTRUCTOR:
6094 /* If we already made some changes, let the front end have a
6095 crack at this before we break it down. */
6096 if (ret != GS_UNHANDLED)
6097 break;
6099 /* If we're initializing from a CONSTRUCTOR, break this into
6100 individual MODIFY_EXPRs. */
6101 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
6102 false);
6103 return ret;
6105 case COND_EXPR:
6106 /* If we're assigning to a non-register type, push the assignment
6107 down into the branches. This is mandatory for ADDRESSABLE types,
6108 since we cannot generate temporaries for such, but it saves a
6109 copy in other cases as well.
6110 Also avoid an extra temporary and copy when assigning to
6111 a register. */
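      /* Illustration: "x = b ? y : z" with a non-register x becomes

	   if (b) x = y; else x = z;

	 pushing the assignment into each branch of the conditional.  */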
6112 if (!is_gimple_reg_type (TREE_TYPE (*from_p))
6113 || (is_gimple_reg (*to_p) && !gimplify_ctxp->allow_rhs_cond_expr))
6115 /* This code should mirror the code in gimplify_cond_expr. */
6116 enum tree_code code = TREE_CODE (*expr_p);
6117 tree cond = *from_p;
6118 tree result = *to_p;
6120 ret = gimplify_expr (&result, pre_p, post_p,
6121 is_gimple_lvalue, fb_lvalue);
6122 if (ret != GS_ERROR)
6123 ret = GS_OK;
6125 /* If we are going to write RESULT more than once, clear
6126 TREE_READONLY flag, otherwise we might incorrectly promote
6127 the variable to static const and initialize it at compile
6128 time in one of the branches. */
6129 if (VAR_P (result)
6130 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
6131 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6132 TREE_READONLY (result) = 0;
6133 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
6134 TREE_OPERAND (cond, 1)
6135 = build2 (code, void_type_node, result,
6136 TREE_OPERAND (cond, 1));
6137 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
6138 TREE_OPERAND (cond, 2)
6139 = build2 (code, void_type_node, unshare_expr (result),
6140 TREE_OPERAND (cond, 2));
6142 TREE_TYPE (cond) = void_type_node;
6143 recalculate_side_effects (cond);
6145 if (want_value)
6147 gimplify_and_add (cond, pre_p);
6148 *expr_p = unshare_expr (result);
6150 else
6151 *expr_p = cond;
6152 return ret;
6154 break;
6156 case CALL_EXPR:
6157 /* For calls that return in memory, give *to_p as the CALL_EXPR's
6158 return slot so that we don't generate a temporary. */
6159 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
6160 && aggregate_value_p (*from_p, *from_p))
6162 bool use_target;
6164 if (!(rhs_predicate_for (*to_p))(*from_p))
6165 /* If we need a temporary, *to_p isn't accurate. */
6166 use_target = false;
6167 /* It's OK to use the return slot directly unless it's an NRV. */
6168 else if (TREE_CODE (*to_p) == RESULT_DECL
6169 && DECL_NAME (*to_p) == NULL_TREE
6170 && needs_to_live_in_memory (*to_p))
6171 use_target = true;
6172 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
6173 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
6174 /* Don't force regs into memory. */
6175 use_target = false;
6176 else if (TREE_CODE (*expr_p) == INIT_EXPR)
6177 /* It's OK to use the target directly if it's being
6178 initialized. */
6179 use_target = true;
6180 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
6181 != INTEGER_CST)
6182 /* Always use the target and thus RSO for variable-sized types.
6183 GIMPLE cannot deal with a variable-sized assignment
6184 embedded in a call statement. */
6185 use_target = true;
6186 else if (TREE_CODE (*to_p) != SSA_NAME
6187 && (!is_gimple_variable (*to_p)
6188 || needs_to_live_in_memory (*to_p)))
6189 /* Don't use the original target if it's already addressable;
6190 if its address escapes, and the called function uses the
6191 NRV optimization, a conforming program could see *to_p
6192 change before the called function returns; see c++/19317.
6193 When optimizing, the return_slot pass marks more functions
6194 as safe after we have escape info. */
6195 use_target = false;
6196 else
6197 use_target = true;
6199 if (use_target)
6201 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
6202 mark_addressable (*to_p);
6205 break;
6207 case WITH_SIZE_EXPR:
6208 /* Likewise for calls that return an aggregate of non-constant size,
6209 since we would not be able to generate a temporary at all. */
6210 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
6212 *from_p = TREE_OPERAND (*from_p, 0);
6213 /* We don't change ret in this case because the
6214 WITH_SIZE_EXPR might have been added in
6215 gimplify_modify_expr, so returning GS_OK would lead to an
6216 infinite loop. */
6217 changed = true;
6219 break;
6221 /* If we're initializing from a container, push the initialization
6222 inside it. */
6223 case CLEANUP_POINT_EXPR:
6224 case BIND_EXPR:
6225 case STATEMENT_LIST:
6227 tree wrap = *from_p;
6228 tree t;
6230 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
6231 fb_lvalue);
6232 if (ret != GS_ERROR)
6233 ret = GS_OK;
6235 t = voidify_wrapper_expr (wrap, *expr_p);
6236 gcc_assert (t == *expr_p);
6238 if (want_value)
6240 gimplify_and_add (wrap, pre_p);
6241 *expr_p = unshare_expr (*to_p);
6243 else
6244 *expr_p = wrap;
6245 return GS_OK;
6248 case NOP_EXPR:
6249 /* Pull out compound literal expressions from a NOP_EXPR.
6250 Those are created in the C FE to drop qualifiers during
6251 lvalue conversion. */
6252 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
6253 && tree_ssa_useless_type_conversion (*from_p))
6255 *from_p = TREE_OPERAND (*from_p, 0);
6256 ret = GS_OK;
6257 changed = true;
6259 break;
6261 case COMPOUND_LITERAL_EXPR:
6263 tree complit = TREE_OPERAND (*expr_p, 1);
6264 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
6265 tree decl = DECL_EXPR_DECL (decl_s);
6266 tree init = DECL_INITIAL (decl);
6268 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
6269 into struct T x = { 0, 1, 2 } if the address of the
6270 compound literal has never been taken. */
6271 if (!TREE_ADDRESSABLE (complit)
6272 && !TREE_ADDRESSABLE (decl)
6273 && init)
6275 *expr_p = copy_node (*expr_p);
6276 TREE_OPERAND (*expr_p, 1) = init;
6277 return GS_OK;
6281 default:
6282 break;
6285 while (changed);
6287 return ret;
6291 /* Return true if T looks like a valid GIMPLE statement. */
6293 static bool
6294 is_gimple_stmt (tree t)
6296 const enum tree_code code = TREE_CODE (t);
6298 switch (code)
6300 case NOP_EXPR:
6301 /* The only valid NOP_EXPR is the empty statement. */
6302 return IS_EMPTY_STMT (t);
6304 case BIND_EXPR:
6305 case COND_EXPR:
6306 /* These are only valid if they're void. */
6307 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
6309 case SWITCH_EXPR:
6310 case GOTO_EXPR:
6311 case RETURN_EXPR:
6312 case LABEL_EXPR:
6313 case CASE_LABEL_EXPR:
6314 case TRY_CATCH_EXPR:
6315 case TRY_FINALLY_EXPR:
6316 case EH_FILTER_EXPR:
6317 case CATCH_EXPR:
6318 case ASM_EXPR:
6319 case STATEMENT_LIST:
6320 case OACC_PARALLEL:
6321 case OACC_KERNELS:
6322 case OACC_SERIAL:
6323 case OACC_DATA:
6324 case OACC_HOST_DATA:
6325 case OACC_DECLARE:
6326 case OACC_UPDATE:
6327 case OACC_ENTER_DATA:
6328 case OACC_EXIT_DATA:
6329 case OACC_CACHE:
6330 case OMP_PARALLEL:
6331 case OMP_FOR:
6332 case OMP_SIMD:
6333 case OMP_DISTRIBUTE:
6334 case OMP_LOOP:
6335 case OMP_TILE:
6336 case OMP_UNROLL:
6337 case OACC_LOOP:
6338 case OMP_SCAN:
6339 case OMP_SCOPE:
6340 case OMP_SECTIONS:
6341 case OMP_SECTION:
6342 case OMP_STRUCTURED_BLOCK:
6343 case OMP_SINGLE:
6344 case OMP_MASTER:
6345 case OMP_MASKED:
6346 case OMP_TASKGROUP:
6347 case OMP_ORDERED:
6348 case OMP_CRITICAL:
6349 case OMP_TASK:
6350 case OMP_TARGET:
6351 case OMP_TARGET_DATA:
6352 case OMP_TARGET_UPDATE:
6353 case OMP_TARGET_ENTER_DATA:
6354 case OMP_TARGET_EXIT_DATA:
6355 case OMP_TASKLOOP:
6356 case OMP_TEAMS:
6357 /* These are always void. */
6358 return true;
6360 case CALL_EXPR:
6361 case MODIFY_EXPR:
6362 case PREDICT_EXPR:
6363 /* These are valid regardless of their type. */
6364 return true;
6366 default:
6367 return false;
6372 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
6373 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
6375 IMPORTANT NOTE: This promotion is performed by introducing a load of the
6376 other, unmodified part of the complex object just before the total store.
6377 As a consequence, if the object is still uninitialized, an undefined value
6378 will be loaded into a register, which may result in a spurious exception
6379 if the register is floating-point and the value happens to be a signaling
6380 NaN for example. Then the fully-fledged complex operations lowering pass
6381 followed by a DCE pass are necessary in order to fix things up. */
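/* Illustrative example (hypothetical complex register c): the partial
   store "__real c = x" is promoted to the total store

     D.1 = __imag c;              <- load of the unmodified part
     c = COMPLEX_EXPR <x, D.1>;

   where D.1 is a fresh temporary.  */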
6383 static enum gimplify_status
6384 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6385 bool want_value)
6387 enum tree_code code, ocode;
6388 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6390 lhs = TREE_OPERAND (*expr_p, 0);
6391 rhs = TREE_OPERAND (*expr_p, 1);
6392 code = TREE_CODE (lhs);
6393 lhs = TREE_OPERAND (lhs, 0);
6395 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6396 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6397 suppress_warning (other);
6398 other = get_formal_tmp_var (other, pre_p);
6400 realpart = code == REALPART_EXPR ? rhs : other;
6401 imagpart = code == REALPART_EXPR ? other : rhs;
6403 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6404 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6405 else
6406 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6408 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6409 *expr_p = (want_value) ? rhs : NULL_TREE;
6411 return GS_ALL_DONE;
6414 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6416 modify_expr
6417 : varname '=' rhs
6418 | '*' ID '=' rhs
6420 PRE_P points to the list where side effects that must happen before
6421 *EXPR_P should be stored.
6423 POST_P points to the list where side effects that must happen after
6424 *EXPR_P should be stored.
6426 WANT_VALUE is nonzero iff we want to use the value of this expression
6427 in another expression. */
6429 static enum gimplify_status
6430 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6431 bool want_value)
6433 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6434 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6435 enum gimplify_status ret = GS_UNHANDLED;
6436 gimple *assign;
6437 location_t loc = EXPR_LOCATION (*expr_p);
6438 gimple_stmt_iterator gsi;
6440 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6441 return GS_ERROR;
6443 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6444 || TREE_CODE (*expr_p) == INIT_EXPR);
6446 /* Trying to simplify a clobber using normal logic doesn't work,
6447 so handle it here. */
6448 if (TREE_CLOBBER_P (*from_p))
6450 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6451 if (ret == GS_ERROR)
6452 return ret;
6453 gcc_assert (!want_value);
6454 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6456 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6457 pre_p, post_p);
6458 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6460 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6461 *expr_p = NULL;
6462 return GS_ALL_DONE;
6465 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6466 memset. */
6467 if (TREE_TYPE (*from_p) != error_mark_node
6468 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6469 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6470 && TREE_CODE (*from_p) == CONSTRUCTOR
6471 && CONSTRUCTOR_NELTS (*from_p) == 0)
6473 maybe_with_size_expr (from_p);
6474 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6475 return gimplify_modify_expr_to_memset (expr_p,
6476 TREE_OPERAND (*from_p, 1),
6477 want_value, pre_p);
6480 /* Insert pointer conversions required by the middle-end that are not
6481 required by the frontend. This fixes middle-end type checking
6482 for, for example, gcc.dg/redecl-6.c. */
6483 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6485 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6486 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6487 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6490 /* See if any simplifications can be done based on what the RHS is. */
6491 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6492 want_value);
6493 if (ret != GS_UNHANDLED)
6494 return ret;
6496 /* For empty types, only gimplify the left hand side and right hand
6497 side as statements and throw away the assignment. Do this after
6498 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6499 types properly. */
6500 if (is_empty_type (TREE_TYPE (*from_p))
6501 && !want_value
6502 /* Don't do this for calls that return addressable types, expand_call
6503 relies on those having a lhs. */
6504 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6505 && TREE_CODE (*from_p) == CALL_EXPR))
6507 gimplify_stmt (from_p, pre_p);
6508 gimplify_stmt (to_p, pre_p);
6509 *expr_p = NULL_TREE;
6510 return GS_ALL_DONE;
6513 /* If the value being copied is of variable width, compute the length
6514 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6515 before gimplifying any of the operands so that we can resolve any
6516 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6517 the size of the expression to be copied, not of the destination, so
6518 that is what we must do here. */
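  /* Illustration: copying a variable-sized value V whose size is N wraps
     the source as WITH_SIZE_EXPR <V, N>, making the byte count explicit
     for the code below and for the RTL expander.  */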
6519 maybe_with_size_expr (from_p);
6521 /* As a special case, we have to temporarily allow for assignments
6522 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6523 a toplevel statement, when gimplifying the GENERIC expression
6524 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6525 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6527 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6528 prevent gimplify_expr from trying to create a new temporary for
6529 foo's LHS, we tell it that it should only gimplify until it
6530 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6531 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6532 and all we need to do here is set 'a' to be its LHS. */
6534 /* Gimplify the RHS first for C++17 and bug 71104. */
6535 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6536 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6537 if (ret == GS_ERROR)
6538 return ret;
6540 /* Then gimplify the LHS. */
6541 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6542 twice we have to make sure to gimplify into non-SSA as otherwise
6543 the abnormal edge added later will make those defs not dominate
6544 their uses.
6545 ??? Technically this applies only to the registers used in the
6546 resulting non-register *TO_P. */
6547 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6548 if (saved_into_ssa
6549 && TREE_CODE (*from_p) == CALL_EXPR
6550 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6551 gimplify_ctxp->into_ssa = false;
6552 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6553 gimplify_ctxp->into_ssa = saved_into_ssa;
6554 if (ret == GS_ERROR)
6555 return ret;
6557 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6558 guess for the predicate was wrong. */
6559 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6560 if (final_pred != initial_pred)
6562 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6563 if (ret == GS_ERROR)
6564 return ret;
6567 /* In the case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add
6568 the type size as an argument to the call. */
6569 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6571 tree call = TREE_OPERAND (*from_p, 0);
6572 tree vlasize = TREE_OPERAND (*from_p, 1);
6574 if (TREE_CODE (call) == CALL_EXPR
6575 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6577 int nargs = call_expr_nargs (call);
6578 tree type = TREE_TYPE (call);
6579 tree ap = CALL_EXPR_ARG (call, 0);
6580 tree tag = CALL_EXPR_ARG (call, 1);
6581 tree aptag = CALL_EXPR_ARG (call, 2);
6582 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6583 IFN_VA_ARG, type,
6584 nargs + 1, ap, tag,
6585 aptag, vlasize);
6586 TREE_OPERAND (*from_p, 0) = newcall;
6590 /* Now see if the above changed *from_p to something we handle specially. */
6591 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6592 want_value);
6593 if (ret != GS_UNHANDLED)
6594 return ret;
6596 /* If we've got a variable-sized assignment between two lvalues (i.e. one
6597 that does not involve a call), then we can make things a bit more straightforward
6598 by converting the assignment to memcpy or memset. */
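  /* E.g. (illustrative): "a = WITH_SIZE_EXPR <b, n>" becomes a call
     __builtin_memcpy (&a, &b, n), and an empty-CONSTRUCTOR source
     becomes __builtin_memset (&a, 0, n).  */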
6599 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6601 tree from = TREE_OPERAND (*from_p, 0);
6602 tree size = TREE_OPERAND (*from_p, 1);
6604 if (TREE_CODE (from) == CONSTRUCTOR)
6605 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6606 else if (is_gimple_addressable (from)
6607 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
6608 && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
6610 *from_p = from;
6611 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6612 pre_p);
6616 /* Transform partial stores to non-addressable complex variables into
6617 total stores. This allows us to use real instead of virtual operands
6618 for these variables, which improves optimization. */
6619 if ((TREE_CODE (*to_p) == REALPART_EXPR
6620 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6621 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6622 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6624 /* Try to alleviate the effects of the gimplification creating artificial
6625 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6626 make sure not to create DECL_DEBUG_EXPR links across functions. */
6627 if (!gimplify_ctxp->into_ssa
6628 && VAR_P (*from_p)
6629 && DECL_IGNORED_P (*from_p)
6630 && DECL_P (*to_p)
6631 && !DECL_IGNORED_P (*to_p)
6632 && decl_function_context (*to_p) == current_function_decl
6633 && decl_function_context (*from_p) == current_function_decl)
6635 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6636 DECL_NAME (*from_p)
6637 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6638 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6639 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6642 if (want_value && TREE_THIS_VOLATILE (*to_p))
6643 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6645 if (TREE_CODE (*from_p) == CALL_EXPR)
6647 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6648 instead of a GIMPLE_ASSIGN. */
6649 gcall *call_stmt;
6650 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6652 /* Gimplify internal functions created in the FEs. */
6653 int nargs = call_expr_nargs (*from_p), i;
6654 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6655 auto_vec<tree> vargs (nargs);
6657 for (i = 0; i < nargs; i++)
6659 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6660 EXPR_LOCATION (*from_p));
6661 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6663 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6664 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6665 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6667 else
6669 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6670 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6671 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6672 tree fndecl = get_callee_fndecl (*from_p);
6673 if (fndecl
6674 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6675 && call_expr_nargs (*from_p) == 3)
6676 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6677 CALL_EXPR_ARG (*from_p, 0),
6678 CALL_EXPR_ARG (*from_p, 1),
6679 CALL_EXPR_ARG (*from_p, 2));
6680 else
6682 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6685 notice_special_calls (call_stmt);
6686 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6687 gimple_call_set_lhs (call_stmt, *to_p);
6688 else if (TREE_CODE (*to_p) == SSA_NAME)
6689 /* The above is somewhat premature, avoid ICEing later for an
6690 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6691 ??? This doesn't make it a default-def. */
6692 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6694 assign = call_stmt;
6696 else
6698 assign = gimple_build_assign (*to_p, *from_p);
6699 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6700 if (COMPARISON_CLASS_P (*from_p))
6701 copy_warning (assign, *from_p);
6704 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6706 /* We should have got an SSA name from the start. */
6707 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6708 || ! gimple_in_ssa_p (cfun));
6711 gimplify_seq_add_stmt (pre_p, assign);
6712 gsi = gsi_last (*pre_p);
6713 maybe_fold_stmt (&gsi);
6715 if (want_value)
6717 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6718 return GS_OK;
6720 else
6721 *expr_p = NULL;
6723 return GS_ALL_DONE;
6726 /* Gimplify a comparison between two variable-sized objects. Do this
6727 with a call to BUILT_IN_MEMCMP. */
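/* Illustration (hypothetical variable-sized operands a and b of the
   same type):

     a == b

   is rewritten as

     __builtin_memcmp (&a, &b, /* roughly sizeof (a) */ size) == 0  */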
6729 static enum gimplify_status
6730 gimplify_variable_sized_compare (tree *expr_p)
6732 location_t loc = EXPR_LOCATION (*expr_p);
6733 tree op0 = TREE_OPERAND (*expr_p, 0);
6734 tree op1 = TREE_OPERAND (*expr_p, 1);
6735 tree t, arg, dest, src, expr;
6737 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6738 arg = unshare_expr (arg);
6739 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6740 src = build_fold_addr_expr_loc (loc, op1);
6741 dest = build_fold_addr_expr_loc (loc, op0);
6742 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6743 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6745 expr
6746 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6747 SET_EXPR_LOCATION (expr, loc);
6748 *expr_p = expr;
6750 return GS_OK;
6753 /* Gimplify a comparison between two aggregate objects of integral scalar
6754 mode as a comparison between the bitwise equivalent scalar values. */
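/* A sketch (hypothetical 4-byte struct S in an integral scalar mode):

     s1 == s2

   is rewritten as a comparison of the bitwise images,

     VIEW_CONVERT_EXPR<int>(s1) == VIEW_CONVERT_EXPR<int>(s2),

   with the operands byte-swapped first for ordering comparisons on
   little-endian targets (see below).  */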
6756 static enum gimplify_status
6757 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6759 const location_t loc = EXPR_LOCATION (*expr_p);
6760 const enum tree_code code = TREE_CODE (*expr_p);
6761 tree op0 = TREE_OPERAND (*expr_p, 0);
6762 tree op1 = TREE_OPERAND (*expr_p, 1);
6763 tree type = TREE_TYPE (op0);
6764 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6766 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6767 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6769 /* We need to perform ordering comparisons in memory order like memcmp and,
6770 therefore, may need to byte-swap operands for little-endian targets. */
6771 if (code != EQ_EXPR && code != NE_EXPR)
6773 gcc_assert (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN);
6774 gcc_assert (TREE_CODE (scalar_type) == INTEGER_TYPE);
6775 tree fndecl;
6777 if (BYTES_BIG_ENDIAN)
6778 fndecl = NULL_TREE;
6779 else
6780 switch (int_size_in_bytes (scalar_type))
6782 case 1:
6783 fndecl = NULL_TREE;
6784 break;
6785 case 2:
6786 fndecl = builtin_decl_implicit (BUILT_IN_BSWAP16);
6787 break;
6788 case 4:
6789 fndecl = builtin_decl_implicit (BUILT_IN_BSWAP32);
6790 break;
6791 case 8:
6792 fndecl = builtin_decl_implicit (BUILT_IN_BSWAP64);
6793 break;
6794 case 16:
6795 fndecl = builtin_decl_implicit (BUILT_IN_BSWAP128);
6796 break;
6797 default:
6798 gcc_unreachable ();
6801 if (fndecl)
6803 op0 = build_call_expr_loc (loc, fndecl, 1, op0);
6804 op1 = build_call_expr_loc (loc, fndecl, 1, op1);
6808 *expr_p = fold_build2_loc (loc, code, TREE_TYPE (*expr_p), op0, op1);
6810 return GS_OK;
6813 /* Gimplify an expression sequence. This function gimplifies each
6814 expression and rewrites the original expression with the last
6815 expression of the sequence in GIMPLE form.
6817 PRE_P points to the list where the side effects for all the
6818 expressions in the sequence will be emitted.
6820 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
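/* E.g. (illustrative): gimplifying "(a = 1, b = 2, c)" with WANT_VALUE
   set emits "a = 1;" and "b = 2;" to PRE_P and rewrites *EXPR_P to the
   last expression, c.  */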
6822 static enum gimplify_status
6823 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6825 tree t = *expr_p;
6829 tree *sub_p = &TREE_OPERAND (t, 0);
6831 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6832 gimplify_compound_expr (sub_p, pre_p, false);
6833 else
6834 gimplify_stmt (sub_p, pre_p);
6836 t = TREE_OPERAND (t, 1);
6838 while (TREE_CODE (t) == COMPOUND_EXPR);
6840 *expr_p = t;
6841 if (want_value)
6842 return GS_OK;
6843 else
6845 gimplify_stmt (expr_p, pre_p);
6846 return GS_ALL_DONE;
6850 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6851 gimplify. After gimplification, EXPR_P will point to a new temporary
6852 that holds the original value of the SAVE_EXPR node.
6854 PRE_P points to the list where side effects that must happen before
6855 *EXPR_P should be stored. */
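/* Illustration (D.1 is a hypothetical temporary): the first time
   SAVE_EXPR <x + y> is gimplified, "D.1 = x + y;" is emitted to PRE_P
   and the SAVE_EXPR is resolved to D.1; later occurrences of the same
   SAVE_EXPR then reuse D.1 instead of re-evaluating x + y.  */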
6857 static enum gimplify_status
6858 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6860 enum gimplify_status ret = GS_ALL_DONE;
6861 tree val;
6863 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6864 val = TREE_OPERAND (*expr_p, 0);
6866 if (val && TREE_TYPE (val) == error_mark_node)
6867 return GS_ERROR;
6869 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6870 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6872 /* The operand may be a void-valued expression. It is
6873 being executed only for its side-effects. */
6874 if (TREE_TYPE (val) == void_type_node)
6876 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6877 is_gimple_stmt, fb_none);
6878 val = NULL;
6880 else
6881 /* The temporary may not be an SSA name as later abnormal and EH
6882 control flow may invalidate use/def domination. When in SSA
6883 form, assume there are no such issues and SAVE_EXPRs only
6884 appear via GENERIC foldings. */
6885 val = get_initialized_tmp_var (val, pre_p, post_p,
6886 gimple_in_ssa_p (cfun));
6888 TREE_OPERAND (*expr_p, 0) = val;
6889 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6892 *expr_p = val;
6894 return ret;
6897 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6899 unary_expr
6900 : ...
6901 | '&' varname
6904 PRE_P points to the list where side effects that must happen before
6905 *EXPR_P should be stored.
6907 POST_P points to the list where side effects that must happen after
6908 *EXPR_P should be stored. */
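/* Two illustrative cases handled below: "&*p" collapses back to "p"
   (modulo qualification conversions), while "&x" marks x addressable
   and may rebuild the ADDR_EXPR in canonical form.  */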
6910 static enum gimplify_status
6911 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6913 tree expr = *expr_p;
6914 tree op0 = TREE_OPERAND (expr, 0);
6915 enum gimplify_status ret;
6916 location_t loc = EXPR_LOCATION (*expr_p);
6918 switch (TREE_CODE (op0))
6920 case INDIRECT_REF:
6921 do_indirect_ref:
6922 /* Check if we are dealing with an expression of the form '&*ptr'.
6923 While the front end folds away '&*ptr' into 'ptr', these
6924 expressions may be generated internally by the compiler (e.g.,
6925 builtins like __builtin_va_end). */
6926 /* Caution: the silent array decomposition semantics we allow for
6927 ADDR_EXPR mean we can't always discard the pair. */
6928 /* Gimplification of the ADDR_EXPR operand may drop
6929 cv-qualification conversions, so make sure we add them if
6930 needed. */
6932 tree op00 = TREE_OPERAND (op0, 0);
6933 tree t_expr = TREE_TYPE (expr);
6934 tree t_op00 = TREE_TYPE (op00);
6936 if (!useless_type_conversion_p (t_expr, t_op00))
6937 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6938 *expr_p = op00;
6939 ret = GS_OK;
6941 break;
6943 case VIEW_CONVERT_EXPR:
6944 /* Take the address of our operand and then convert it to the type of
6945 this ADDR_EXPR.
6947 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6948 all clear. The impact of this transformation is even less clear. */
6950 /* If the operand is a useless conversion, look through it. Doing so
6951 guarantees that the ADDR_EXPR and its operand will remain of the
6952 same type. */
6953 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6954 op0 = TREE_OPERAND (op0, 0);
6956 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6957 build_fold_addr_expr_loc (loc,
6958 TREE_OPERAND (op0, 0)));
6959 ret = GS_OK;
6960 break;
6962 case MEM_REF:
6963 if (integer_zerop (TREE_OPERAND (op0, 1)))
6964 goto do_indirect_ref;
6966 /* fall through */
6968 default:
6969 /* If we see a call to a declared builtin or see its address
6970 being taken (we can unify those cases here) then we can mark
6971 the builtin for implicit generation by GCC. */
6972 if (TREE_CODE (op0) == FUNCTION_DECL
6973 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6974 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6975 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6977 /* We use fb_either here because the C frontend sometimes takes
6978 the address of a call that returns a struct; see
6979 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6980 the implied temporary explicit. */
6982 /* Make the operand addressable. */
6983 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6984 is_gimple_addressable, fb_either);
6985 if (ret == GS_ERROR)
6986 break;
6988 /* Then mark it. Beware that it may not be possible to do so directly
6989 if a temporary has been created by the gimplification. */
6990 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6992 op0 = TREE_OPERAND (expr, 0);
6994 /* For various reasons, the gimplification of the expression
6995 may have made a new INDIRECT_REF. */
6996 if (INDIRECT_REF_P (op0)
6997 || (TREE_CODE (op0) == MEM_REF
6998 && integer_zerop (TREE_OPERAND (op0, 1))))
6999 goto do_indirect_ref;
7001 mark_addressable (TREE_OPERAND (expr, 0));
7003 /* The FEs may end up building ADDR_EXPRs early on a decl with
7004 an incomplete type. Re-build ADDR_EXPRs in canonical form
7005 here. */
7006 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
7007 *expr_p = build_fold_addr_expr (op0);
7009 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
7010 if (TREE_CODE (*expr_p) == ADDR_EXPR)
7011 recompute_tree_invariant_for_addr_expr (*expr_p);
7013 /* If we re-built the ADDR_EXPR add a conversion to the original type
7014 if required. */
7015 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
7016 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
7018 break;
7021 return ret;
7024 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
7025 values; output operands should be gimple lvalues. */
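/* Illustration of the in/out splitting done below (hypothetical asm):

     asm ("..." : "+r" (x));

   is treated as an output "=r" (x) plus a matching input "0" (x), giving
   the optimizers separate operands to work with.  */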
7027 static enum gimplify_status
7028 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7030 tree expr;
7031 int noutputs;
7032 const char **oconstraints;
7033 int i;
7034 tree link;
7035 const char *constraint;
7036 bool allows_mem, allows_reg, is_inout;
7037 enum gimplify_status ret, tret;
7038 gasm *stmt;
7039 vec<tree, va_gc> *inputs;
7040 vec<tree, va_gc> *outputs;
7041 vec<tree, va_gc> *clobbers;
7042 vec<tree, va_gc> *labels;
7043 tree link_next;
7045 expr = *expr_p;
7046 noutputs = list_length (ASM_OUTPUTS (expr));
7047 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
7049 inputs = NULL;
7050 outputs = NULL;
7051 clobbers = NULL;
7052 labels = NULL;
7054 ret = GS_ALL_DONE;
7055 link_next = NULL_TREE;
7056 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
7058 bool ok;
7059 size_t constraint_len;
7061 link_next = TREE_CHAIN (link);
7063 oconstraints[i]
7064 = constraint
7065 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7066 constraint_len = strlen (constraint);
7067 if (constraint_len == 0)
7068 continue;
7070 ok = parse_output_constraint (&constraint, i, 0, 0,
7071 &allows_mem, &allows_reg, &is_inout);
7072 if (!ok)
7074 ret = GS_ERROR;
7075 is_inout = false;
7078 /* If we can't make copies, we can only accept memory.
7079 Similarly for VLAs. */
7080 tree outtype = TREE_TYPE (TREE_VALUE (link));
7081 if (outtype != error_mark_node
7082 && (TREE_ADDRESSABLE (outtype)
7083 || !COMPLETE_TYPE_P (outtype)
7084 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
7086 if (allows_mem)
7087 allows_reg = 0;
7088 else
7090 error ("impossible constraint in %<asm%>");
7091 error ("non-memory output %d must stay in memory", i);
7092 return GS_ERROR;
7096 if (!allows_reg && allows_mem)
7097 mark_addressable (TREE_VALUE (link));
7099 tree orig = TREE_VALUE (link);
7100 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7101 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7102 fb_lvalue | fb_mayfail);
7103 if (tret == GS_ERROR)
7105 if (orig != error_mark_node)
7106 error ("invalid lvalue in %<asm%> output %d", i);
7107 ret = tret;
7110 /* If the gimplified operand is a register we do not allow memory. */
7111 if (allows_reg
7112 && allows_mem
7113 && (is_gimple_reg (TREE_VALUE (link))
7114 || (handled_component_p (TREE_VALUE (link))
7115 && is_gimple_reg (TREE_OPERAND (TREE_VALUE (link), 0)))))
7116 allows_mem = 0;
7118 /* If the constraint does not allow memory, make sure we gimplify
7119 it to a register if it is not one already but its base is. This
7120 happens for complex and vector components. */
7121 if (!allows_mem)
7123 tree op = TREE_VALUE (link);
7124 if (! is_gimple_val (op)
7125 && is_gimple_reg_type (TREE_TYPE (op))
7126 && is_gimple_reg (get_base_address (op)))
7128 tree tem = create_tmp_reg (TREE_TYPE (op));
7129 tree ass;
7130 if (is_inout)
7132 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
7133 tem, unshare_expr (op));
7134 gimplify_and_add (ass, pre_p);
7136 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
7137 gimplify_and_add (ass, post_p);
7139 TREE_VALUE (link) = tem;
7140 tret = GS_OK;
7144 vec_safe_push (outputs, link);
7145 TREE_CHAIN (link) = NULL_TREE;
7147 if (is_inout)
7149 /* An input/output operand. To give the optimizers more
7150 flexibility, split it into separate input and output
7151 operands. */
7152 tree input;
7153 /* Buffer big enough to format a 32-bit UINT_MAX into. */
7154 char buf[11];
7156 /* Turn the in/out constraint into an output constraint. */
7157 char *p = xstrdup (constraint);
7158 p[0] = '=';
7159 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
7161 /* And add a matching input constraint. */
7162 if (allows_reg)
7164 sprintf (buf, "%u", i);
7166 /* If there are multiple alternatives in the constraint,
7167 handle each of them individually. Those that allow a register
7168 will be replaced with the operand number; the others stay
7169 unchanged.
7170 if (strchr (p, ',') != NULL)
7172 size_t len = 0, buflen = strlen (buf);
7173 char *beg, *end, *str, *dst;
7175 for (beg = p + 1;;)
7177 end = strchr (beg, ',');
7178 if (end == NULL)
7179 end = strchr (beg, '\0');
7180 if ((size_t) (end - beg) < buflen)
7181 len += buflen + 1;
7182 else
7183 len += end - beg + 1;
7184 if (*end)
7185 beg = end + 1;
7186 else
7187 break;
7190 str = (char *) alloca (len);
7191 for (beg = p + 1, dst = str;;)
7193 const char *tem;
7194 bool mem_p, reg_p, inout_p;
7196 end = strchr (beg, ',');
7197 if (end)
7198 *end = '\0';
7199 beg[-1] = '=';
7200 tem = beg - 1;
7201 parse_output_constraint (&tem, i, 0, 0,
7202 &mem_p, &reg_p, &inout_p);
7203 if (dst != str)
7204 *dst++ = ',';
7205 if (reg_p)
7207 memcpy (dst, buf, buflen);
7208 dst += buflen;
7210 else
7212 if (end)
7213 len = end - beg;
7214 else
7215 len = strlen (beg);
7216 memcpy (dst, beg, len);
7217 dst += len;
7219 if (end)
7220 beg = end + 1;
7221 else
7222 break;
7224 *dst = '\0';
7225 input = build_string (dst - str, str);
7227 else
7228 input = build_string (strlen (buf), buf);
7230 else
7231 input = build_string (constraint_len - 1, constraint + 1);
7233 free (p);
7235 input = build_tree_list (build_tree_list (NULL_TREE, input),
7236 unshare_expr (TREE_VALUE (link)));
7237 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
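/* An illustrative example (not from the original source): for an
   in/out operand written as
     asm ("..." : "+r,m" (x));
   the code above rewrites the output constraint to "=r,m" and builds
   the matching input constraint "0,m": each register alternative is
   replaced by the operand number, the others are kept verbatim.  */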
7241 link_next = NULL_TREE;
7242 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
7244 link_next = TREE_CHAIN (link);
7245 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7246 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7247 oconstraints, &allows_mem, &allows_reg);
7249 /* If we can't make copies, we can only accept memory. */
7250 tree intype = TREE_TYPE (TREE_VALUE (link));
7251 if (intype != error_mark_node
7252 && (TREE_ADDRESSABLE (intype)
7253 || !COMPLETE_TYPE_P (intype)
7254 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
7256 if (allows_mem)
7257 allows_reg = 0;
7258 else
7260 error ("impossible constraint in %<asm%>");
7261 error ("non-memory input %d must stay in memory", i);
7262 return GS_ERROR;
7266 /* If the operand is a memory input, it should be an lvalue. */
7267 if (!allows_reg && allows_mem)
7269 tree inputv = TREE_VALUE (link);
7270 STRIP_NOPS (inputv);
7271 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
7272 || TREE_CODE (inputv) == PREINCREMENT_EXPR
7273 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
7274 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
7275 || TREE_CODE (inputv) == MODIFY_EXPR)
7276 TREE_VALUE (link) = error_mark_node;
7277 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7278 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7279 if (tret != GS_ERROR)
7281 /* Unlike output operands, memory inputs are not guaranteed
7282 to be lvalues by the FE, and while the expressions are
7283 marked addressable there, if it is e.g. a statement
7284 expression, temporaries in it might not end up being
7285 addressable. They might be already used in the IL and thus
7286 it is too late to make them addressable now though. */
7287 tree x = TREE_VALUE (link);
7288 while (handled_component_p (x))
7289 x = TREE_OPERAND (x, 0);
7290 if (TREE_CODE (x) == MEM_REF
7291 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
7292 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
7293 if ((VAR_P (x)
7294 || TREE_CODE (x) == PARM_DECL
7295 || TREE_CODE (x) == RESULT_DECL)
7296 && !TREE_ADDRESSABLE (x)
7297 && is_gimple_reg (x))
7299 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
7300 input_location), 0,
7301 "memory input %d is not directly addressable",
7303 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
7306 mark_addressable (TREE_VALUE (link));
7307 if (tret == GS_ERROR)
7309 if (inputv != error_mark_node)
7310 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
7311 "memory input %d is not directly addressable", i);
7312 ret = tret;
7315 else
7317 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
7318 is_gimple_asm_val, fb_rvalue);
7319 if (tret == GS_ERROR)
7320 ret = tret;
7323 TREE_CHAIN (link) = NULL_TREE;
7324 vec_safe_push (inputs, link);
7327 link_next = NULL_TREE;
7328 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
7330 link_next = TREE_CHAIN (link);
7331 TREE_CHAIN (link) = NULL_TREE;
7332 vec_safe_push (clobbers, link);
7335 link_next = NULL_TREE;
7336 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
7338 link_next = TREE_CHAIN (link);
7339 TREE_CHAIN (link) = NULL_TREE;
7340 vec_safe_push (labels, link);
7343 /* Do not add ASMs with errors to the gimple IL stream. */
7344 if (ret != GS_ERROR)
7346 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
7347 inputs, outputs, clobbers, labels);
7349 /* asm is volatile if it was marked by the user as volatile or
7350 there are no outputs or this is an asm goto. */
7351 gimple_asm_set_volatile (stmt,
7352 ASM_VOLATILE_P (expr)
7353 || noutputs == 0
7354 || labels);
7355 gimple_asm_set_basic (stmt, ASM_BASIC_P (expr));
7356 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
7358 gimplify_seq_add_stmt (pre_p, stmt);
7361 return ret;
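/* Sketch of the volatility rule above (illustrative): a plain
     asm ("nop");
   has no outputs, so it is marked volatile exactly as if the user had
   written asm volatile ("nop"); the same holds for every asm goto.  */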
7364 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
7365 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
7366 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
7367 return to this function.
7369 FIXME should we complexify the prequeue handling instead? Or use flags
7370 for all the cleanups and let the optimizer tighten them up? The current
7371 code seems pretty fragile; it will break on a cleanup within any
7372 non-conditional nesting. But any such nesting would be broken, anyway;
7373 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
7374 and continues out of it. We can do that at the RTL level, though, so
7375 having an optimizer to tighten up try/finally regions would be a Good
7376 Thing. */
7378 static enum gimplify_status
7379 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
7381 gimple_stmt_iterator iter;
7382 gimple_seq body_sequence = NULL;
7384 tree temp = voidify_wrapper_expr (*expr_p, NULL);
7386 /* We only care about the number of conditions between the innermost
7387 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
7388 any cleanups collected outside the CLEANUP_POINT_EXPR. */
7389 int old_conds = gimplify_ctxp->conditions;
7390 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
7391 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
7392 gimplify_ctxp->conditions = 0;
7393 gimplify_ctxp->conditional_cleanups = NULL;
7394 gimplify_ctxp->in_cleanup_point_expr = true;
7396 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
7398 gimplify_ctxp->conditions = old_conds;
7399 gimplify_ctxp->conditional_cleanups = old_cleanups;
7400 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
7402 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
7404 gimple *wce = gsi_stmt (iter);
7406 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
7408 if (gsi_one_before_end_p (iter))
7410 /* Note that gsi_insert_seq_before and gsi_remove do not
7411 scan operands, unlike some other sequence mutators. */
7412 if (!gimple_wce_cleanup_eh_only (wce))
7413 gsi_insert_seq_before_without_update (&iter,
7414 gimple_wce_cleanup (wce),
7415 GSI_SAME_STMT);
7416 gsi_remove (&iter, true);
7417 break;
7419 else
7421 gtry *gtry;
7422 gimple_seq seq;
7423 enum gimple_try_flags kind;
7425 if (gimple_wce_cleanup_eh_only (wce))
7426 kind = GIMPLE_TRY_CATCH;
7427 else
7428 kind = GIMPLE_TRY_FINALLY;
7429 seq = gsi_split_seq_after (iter);
7431 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
7432 /* Do not use gsi_replace here, as it may scan operands.
7433 We want to do a simple structural modification only. */
7434 gsi_set_stmt (&iter, gtry);
7435 iter = gsi_start (gtry->eval);
7438 else
7439 gsi_next (&iter);
7442 gimplify_seq_add_seq (pre_p, body_sequence);
7443 if (temp)
7445 *expr_p = temp;
7446 return GS_OK;
7448 else
7450 *expr_p = NULL;
7451 return GS_ALL_DONE;
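/* A sketch of the rewrite performed above (illustrative, assuming a
   cleanup CLN attached by gimple_push_cleanup): a body gimplified to
     wce (CLN);
     foo ();
   becomes
     try { foo (); } finally { CLN; }
   whereas a WCE that ends the sequence is simply removed, with CLN
   emitted inline (unless it is EH-only).  */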
7455 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7456 is the cleanup action required. EH_ONLY is true if the cleanup should
7457 only be executed if an exception is thrown, not on normal exit.
7458 If FORCE_UNCOND is true, perform the cleanup unconditionally; this is
7459 only valid for clobbers. */
7461 static void
7462 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7463 bool force_uncond = false)
7465 gimple *wce;
7466 gimple_seq cleanup_stmts = NULL;
7468 /* Errors can result in improperly nested cleanups, which results in
7469 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7470 if (seen_error ())
7471 return;
7473 if (gimple_conditional_context ())
7475 /* If we're in a conditional context, this is more complex. We only
7476 want to run the cleanup if we actually ran the initialization that
7477 necessitates it, but we want to run it after the end of the
7478 conditional context. So we wrap the try/finally around the
7479 condition and use a flag to determine whether or not to actually
7480 run the destructor. Thus
7482 test ? f(A()) : 0
7484 becomes (approximately)
7486 flag = 0;
7487 try {
7488 if (test) { A::A(temp); flag = 1; val = f(temp); }
7489 else { val = 0; }
7490 } finally {
7491 if (flag) A::~A(temp);
7495 if (force_uncond)
7497 gimplify_stmt (&cleanup, &cleanup_stmts);
7498 wce = gimple_build_wce (cleanup_stmts);
7499 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7501 else
7503 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7504 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7505 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7507 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7508 gimplify_stmt (&cleanup, &cleanup_stmts);
7509 wce = gimple_build_wce (cleanup_stmts);
7510 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7512 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7513 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7514 gimplify_seq_add_stmt (pre_p, ftrue);
7516 /* Because of this manipulation, and the EH edges that jump
7517 threading cannot redirect, the temporary (VAR) will appear
7518 to be used uninitialized. Don't warn. */
7519 suppress_warning (var, OPT_Wuninitialized);
7522 else
7524 gimplify_stmt (&cleanup, &cleanup_stmts);
7525 wce = gimple_build_wce (cleanup_stmts);
7526 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7527 gimplify_seq_add_stmt (pre_p, wce);
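/* Illustrative note on FORCE_UNCOND: a clobber pushed this way gets no
   flag variable even inside a conditional context, so the storage-end
   clobber may execute although the initialization it follows did not.
   That is harmless for clobbers, which is why the function comment
   restricts FORCE_UNCOND to them; a real destructor must go through
   the flag-guarded path shown above.  */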
7531 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7533 static enum gimplify_status
7534 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7536 tree targ = *expr_p;
7537 tree temp = TARGET_EXPR_SLOT (targ);
7538 tree init = TARGET_EXPR_INITIAL (targ);
7539 enum gimplify_status ret;
7541 bool unpoison_empty_seq = false;
7542 gimple_stmt_iterator unpoison_it;
7544 if (init)
7546 gimple_seq init_pre_p = NULL;
7548 /* TARGET_EXPR temps aren't part of the enclosing block, so add them
7549 to the temps list. Also handle variable-length TARGET_EXPRs. */
7550 if (!poly_int_tree_p (DECL_SIZE (temp)))
7552 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7553 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7554 /* FIXME: this is correct only when the size of the type does
7555 not depend on expressions evaluated in init. */
7556 gimplify_vla_decl (temp, &init_pre_p);
7558 else
7560 /* Save the location where we need to place unpoisoning. It's possible
7561 that the variable will later be converted to needs_to_live_in_memory. */
7562 unpoison_it = gsi_last (*pre_p);
7563 unpoison_empty_seq = gsi_end_p (unpoison_it);
7565 gimple_add_tmp_var (temp);
7568 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7569 expression is supposed to initialize the slot. */
7570 if (VOID_TYPE_P (TREE_TYPE (init)))
7571 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7572 fb_none);
7573 else
7575 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7576 init = init_expr;
7577 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7578 fb_none);
7579 init = NULL;
7580 ggc_free (init_expr);
7582 if (ret == GS_ERROR)
7584 /* PR c++/28266 Make sure this is expanded only once. */
7585 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7586 return GS_ERROR;
7589 if (init)
7590 gimplify_and_add (init, &init_pre_p);
7592 /* Add a clobber for the temporary going out of scope, like
7593 gimplify_bind_expr. But only if we did not promote the
7594 temporary to static storage. */
7595 if (gimplify_ctxp->in_cleanup_point_expr
7596 && !TREE_STATIC (temp)
7597 && needs_to_live_in_memory (temp))
7599 if (flag_stack_reuse == SR_ALL)
7601 tree clobber = build_clobber (TREE_TYPE (temp),
7602 CLOBBER_STORAGE_END);
7603 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7604 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7606 if (asan_poisoned_variables
7607 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7608 && !TREE_STATIC (temp)
7609 && dbg_cnt (asan_use_after_scope)
7610 && !gimplify_omp_ctxp)
7612 tree asan_cleanup = build_asan_poison_call_expr (temp);
7613 if (asan_cleanup)
7615 if (unpoison_empty_seq)
7616 unpoison_it = gsi_start (*pre_p);
7618 asan_poison_variable (temp, false, &unpoison_it,
7619 unpoison_empty_seq);
7620 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7625 gimple_seq_add_seq (pre_p, init_pre_p);
7627 /* If needed, push the cleanup for the temp. */
7628 if (TARGET_EXPR_CLEANUP (targ))
7629 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7630 CLEANUP_EH_ONLY (targ), pre_p);
7632 /* Only expand this once. */
7633 TREE_OPERAND (targ, 3) = init;
7634 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7636 else
7637 /* We should have expanded this before. */
7638 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7640 *expr_p = temp;
7641 return GS_OK;
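/* An illustrative example (assumed C++ input, hypothetical slot name):
     struct S { S (); ~S (); };
     void f (const S &);
     ... f (S ());
   passes a TARGET_EXPR <D.1234, S::S (&D.1234)> as the argument; the
   code above emits the constructor call into the pre-queue, pushes the
   destructor as the cleanup of D.1234 via gimple_push_cleanup, and
   replaces the whole expression with the slot D.1234.  */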
7644 /* Gimplification of expression trees. */
7646 /* Gimplify an expression which appears in statement context. The
7647 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7648 NULL, a new sequence is allocated.
7650 Return true if we actually added a statement to the queue. */
7652 bool
7653 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7655 gimple_seq_node last;
7657 last = gimple_seq_last (*seq_p);
7658 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7659 return last != gimple_seq_last (*seq_p);
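/* Usage sketch (illustrative): the return value lets callers detect
   whether anything was emitted, e.g.
     gimple_seq seq = NULL;
     bool emitted = gimplify_stmt (&stmt, &seq);
   where EMITTED is false when STMT gimplified to nothing at all.  */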
7662 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
7663 parallels. If entries already exist, force them to be some flavor of
7664 private. If there is no enclosing parallel, do nothing. */
7666 void
7667 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7669 splay_tree_node n;
7671 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7672 return;
7674 do
7676 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7677 if (n != NULL)
7679 if (n->value & GOVD_SHARED)
7680 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7681 else if (n->value & GOVD_MAP)
7682 n->value |= GOVD_MAP_TO_ONLY;
7683 else
7684 return;
7686 else if ((ctx->region_type & ORT_TARGET) != 0)
7688 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7689 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7690 else
7691 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7693 else if (ctx->region_type != ORT_WORKSHARE
7694 && ctx->region_type != ORT_TASKGROUP
7695 && ctx->region_type != ORT_SIMD
7696 && ctx->region_type != ORT_ACC
7697 && !(ctx->region_type & ORT_TARGET_DATA))
7698 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7700 ctx = ctx->outer_context;
7702 while (ctx);
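/* Illustrative example (assumed C input): given
     void foo (int n)
     {
       int a[n];
     #pragma omp parallel private (a)
       ...
     }
   the gimplified size expression of the VLA 'a' is recorded here as
   firstprivate on the enclosing parallel, so every thread can size its
   private copy.  */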
7705 /* Similarly for each of the type sizes of TYPE. */
7707 static void
7708 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7710 if (type == NULL || type == error_mark_node)
7711 return;
7712 type = TYPE_MAIN_VARIANT (type);
7714 if (ctx->privatized_types->add (type))
7715 return;
7717 switch (TREE_CODE (type))
7719 case INTEGER_TYPE:
7720 case ENUMERAL_TYPE:
7721 case BOOLEAN_TYPE:
7722 case REAL_TYPE:
7723 case FIXED_POINT_TYPE:
7724 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7725 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7726 break;
7728 case ARRAY_TYPE:
7729 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7730 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7731 break;
7733 case RECORD_TYPE:
7734 case UNION_TYPE:
7735 case QUAL_UNION_TYPE:
7737 tree field;
7738 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7739 if (TREE_CODE (field) == FIELD_DECL)
7741 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7742 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7745 break;
7747 case POINTER_TYPE:
7748 case REFERENCE_TYPE:
7749 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7750 break;
7752 default:
7753 break;
7756 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7757 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7758 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7761 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7763 static void
7764 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7766 splay_tree_node n;
7767 unsigned int nflags;
7768 tree t;
7770 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7771 return;
7773 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7774 there are constructors involved somewhere. The exception is a shared
7775 clause: there is nothing privatized in that case. */
7776 if ((flags & GOVD_SHARED) == 0
7777 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7778 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7779 flags |= GOVD_SEEN;
7781 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7782 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7784 /* We shouldn't be re-adding the decl with the same data
7785 sharing class. */
7786 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7787 nflags = n->value | flags;
7788 /* The only combination of data sharing classes we should see is
7789 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7790 reduction variables to be used in data sharing clauses. */
7791 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7792 || ((nflags & GOVD_DATA_SHARE_CLASS)
7793 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7794 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7795 n->value = nflags;
7796 return;
7799 /* When adding a variable-sized variable, we have to handle all sorts
7800 of additional bits of data: the pointer replacement variable, and
7801 the parameters of the type. */
7802 if (DECL_SIZE (decl) && !poly_int_tree_p (DECL_SIZE (decl)))
7804 /* Add the pointer replacement variable as PRIVATE if the variable
7805 replacement is private, else FIRSTPRIVATE since we'll need the
7806 address of the original variable either for SHARED, or for the
7807 copy into or out of the context. */
7808 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7810 if (flags & GOVD_MAP)
7811 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7812 else if (flags & GOVD_PRIVATE)
7813 nflags = GOVD_PRIVATE;
7814 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7815 && (flags & GOVD_FIRSTPRIVATE))
7816 || (ctx->region_type == ORT_TARGET_DATA
7817 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7818 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7819 else
7820 nflags = GOVD_FIRSTPRIVATE;
7821 nflags |= flags & GOVD_SEEN;
7822 t = DECL_VALUE_EXPR (decl);
7823 gcc_assert (INDIRECT_REF_P (t));
7824 t = TREE_OPERAND (t, 0);
7825 gcc_assert (DECL_P (t));
7826 omp_add_variable (ctx, t, nflags);
7829 /* Add all of the variable and type parameters (which should have
7830 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7831 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7832 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7833 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7835 /* The variable-sized variable itself is never SHARED, only some form
7836 of PRIVATE. The sharing would take place via the pointer variable
7837 which we remapped above. */
7838 if (flags & GOVD_SHARED)
7839 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7840 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7842 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7843 alloca statement we generate for the variable, so make sure it
7844 is available. This isn't automatically needed for the SHARED
7845 case, since we won't be allocating local storage then.
7846 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7847 in that case omp_notice_variable will be called later,
7848 when it is gimplified. */
7849 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7850 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7851 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7853 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7854 && omp_privatize_by_reference (decl))
7856 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7858 /* Similar to the direct variable sized case above, we'll need the
7859 size of references being privatized. */
7860 if ((flags & GOVD_SHARED) == 0)
7862 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7863 if (t && DECL_P (t))
7864 omp_notice_variable (ctx, t, true);
7868 if (n != NULL)
7869 n->value |= flags;
7870 else
7871 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7873 /* For reduction clauses in OpenACC loop directives, by default create a
7874 copy clause on the enclosing parallel construct for carrying back the
7875 results. */
7876 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7878 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7879 while (outer_ctx)
7881 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7882 if (n != NULL)
7884 /* Ignore local variables and explicitly declared clauses. */
7885 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7886 break;
7887 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7889 /* According to the OpenACC spec, such a reduction variable
7890 should already have a copy map on a kernels construct;
7891 verify that here. */
7892 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7893 && (n->value & GOVD_MAP));
7895 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7897 /* Remove firstprivate and make it a copy map. */
7898 n->value &= ~GOVD_FIRSTPRIVATE;
7899 n->value |= GOVD_MAP;
7902 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7904 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7905 GOVD_MAP | GOVD_SEEN);
7906 break;
7908 outer_ctx = outer_ctx->outer_context;
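/* Sketch (illustrative, OpenACC input):
     #pragma acc parallel loop reduction (+:sum)
   with no explicit data clause for 'sum' on the parallel construct,
   the loop above inserts an implicit GOVD_MAP ("copy") entry for 'sum'
   on that construct, so the reduction result is carried back; on a
   kernels construct it merely asserts the copy map already exists.  */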
7913 /* Notice a threadprivate variable DECL used in OMP context CTX.
7914 This just prints out diagnostics about threadprivate variable uses
7915 in untied tasks, target regions and order(concurrent) regions.
7916 If DECL2 is non-NULL, prevent this warning on that variable. */
7918 static bool
7919 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7920 tree decl2)
7922 splay_tree_node n;
7923 struct gimplify_omp_ctx *octx;
7925 for (octx = ctx; octx; octx = octx->outer_context)
7926 if ((octx->region_type & ORT_TARGET) != 0
7927 || octx->order_concurrent)
7929 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7930 if (n == NULL)
7932 if (octx->order_concurrent)
7934 error ("threadprivate variable %qE used in a region with"
7935 " %<order(concurrent)%> clause", DECL_NAME (decl));
7936 inform (octx->location, "enclosing region");
7938 else
7940 error ("threadprivate variable %qE used in target region",
7941 DECL_NAME (decl));
7942 inform (octx->location, "enclosing target region");
7944 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7946 if (decl2)
7947 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7950 if (ctx->region_type != ORT_UNTIED_TASK)
7951 return false;
7952 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7953 if (n == NULL)
7955 error ("threadprivate variable %qE used in untied task",
7956 DECL_NAME (decl));
7957 inform (ctx->location, "enclosing task");
7958 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7960 if (decl2)
7961 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7962 return false;
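/* Illustrative trigger (assumed C input):
     int t;
     #pragma omp threadprivate (t)
     ...
     #pragma omp target
       t = 1;
   reports "threadprivate variable 't' used in target region" through
   the code above.  */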
7965 /* Return true if global var DECL is device resident. */
7967 static bool
7968 device_resident_p (tree decl)
7970 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7972 if (!attr)
7973 return false;
7975 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7977 tree c = TREE_VALUE (t);
7978 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7979 return true;
7982 return false;
7985 /* Return true if DECL has an ACC DECLARE attribute. */
7987 static bool
7988 is_oacc_declared (tree decl)
7990 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7991 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7992 return declared != NULL_TREE;
7995 /* Determine outer default flags for DECL mentioned in an OMP region
7996 but not declared in an enclosing clause.
7998 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7999 remapped firstprivate instead of shared. To some extent this is
8000 addressed in omp_firstprivatize_type_sizes, but not
8001 effectively. */
8003 static unsigned
8004 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
8005 bool in_code, unsigned flags)
8007 enum omp_clause_default_kind default_kind = ctx->default_kind;
8008 enum omp_clause_default_kind kind;
8010 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
8011 if (ctx->region_type & ORT_TASK)
8013 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
8015 /* The event-handle specified by a detach clause should always be firstprivate,
8016 regardless of the current default. */
8017 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
8018 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
8020 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
8021 default_kind = kind;
8022 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
8023 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
8024 /* For C/C++ default({,first}private), variables with static storage duration
8025 declared in a namespace or global scope and referenced in the construct
8026 must be explicitly specified, i.e. this acts as default(none). */
8027 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
8028 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
8029 && VAR_P (decl)
8030 && is_global_var (decl)
8031 && (DECL_FILE_SCOPE_P (decl)
8032 || (DECL_CONTEXT (decl)
8033 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
8034 && !lang_GNU_Fortran ())
8035 default_kind = OMP_CLAUSE_DEFAULT_NONE;
8037 switch (default_kind)
8039 case OMP_CLAUSE_DEFAULT_NONE:
8041 const char *rtype;
8043 if (ctx->region_type & ORT_PARALLEL)
8044 rtype = "parallel";
8045 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
8046 rtype = "taskloop";
8047 else if (ctx->region_type & ORT_TASK)
8048 rtype = "task";
8049 else if (ctx->region_type & ORT_TEAMS)
8050 rtype = "teams";
8051 else
8052 gcc_unreachable ();
8054 error ("%qE not specified in enclosing %qs",
8055 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
8056 inform (ctx->location, "enclosing %qs", rtype);
8058 /* FALLTHRU */
8059 case OMP_CLAUSE_DEFAULT_SHARED:
8060 flags |= GOVD_SHARED;
8061 break;
8062 case OMP_CLAUSE_DEFAULT_PRIVATE:
8063 flags |= GOVD_PRIVATE;
8064 break;
8065 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
8066 flags |= GOVD_FIRSTPRIVATE;
8067 break;
8068 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
8069 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
8070 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
8071 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
8073 omp_notice_variable (octx, decl, in_code);
8074 for (; octx; octx = octx->outer_context)
8076 splay_tree_node n2;
8078 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
8079 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
8080 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
8081 continue;
8082 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
8084 flags |= GOVD_FIRSTPRIVATE;
8085 goto found_outer;
8087 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
8089 flags |= GOVD_SHARED;
8090 goto found_outer;
8095 if (TREE_CODE (decl) == PARM_DECL
8096 || (!is_global_var (decl)
8097 && DECL_CONTEXT (decl) == current_function_decl))
8098 flags |= GOVD_FIRSTPRIVATE;
8099 else
8100 flags |= GOVD_SHARED;
8101 found_outer:
8102 break;
8104 default:
8105 gcc_unreachable ();
8108 return flags;
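/* Illustrative trigger for the default(none) case above (assumed C
   input):
     int x = 0;
     #pragma omp parallel default(none)
       x++;
   reports "'x' not specified in enclosing 'parallel'", since no data
   sharing was determined for 'x'.  */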
8111 /* Return the string name for an OpenACC construct type given its ORT_* value. */
8113 static const char *
8114 oacc_region_type_name (enum omp_region_type region_type)
8116 switch (region_type)
8118 case ORT_ACC_DATA:
8119 return "data";
8120 case ORT_ACC_PARALLEL:
8121 return "parallel";
8122 case ORT_ACC_KERNELS:
8123 return "kernels";
8124 case ORT_ACC_SERIAL:
8125 return "serial";
8126 default:
8127 gcc_unreachable ();
8131 /* Determine outer default flags for DECL mentioned in an OACC region
8132 but not declared in an enclosing clause. */
8134 static unsigned
8135 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
8137 struct gimplify_omp_ctx *ctx_default = ctx;
8138 /* If no 'default' clause appears on this compute construct... */
8139 if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
8141 /* ..., see if one appears on a lexically containing 'data'
8142 construct. */
8143 while ((ctx_default = ctx_default->outer_context))
8145 if (ctx_default->region_type == ORT_ACC_DATA
8146 && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
8147 break;
8149 /* If not, reset. */
8150 if (!ctx_default)
8151 ctx_default = ctx;
8154 bool on_device = false;
8155 bool is_private = false;
8156 bool declared = is_oacc_declared (decl);
8157 tree type = TREE_TYPE (decl);
8159 if (omp_privatize_by_reference (decl))
8160 type = TREE_TYPE (type);
8162 /* For Fortran COMMON blocks, only the used variables in those blocks are
8163 transferred and remapped. The block itself will have a private clause to
8164 avoid transferring the data twice.
8165 The hook evaluates to false by default. For a variable in a Fortran COMMON
8166 or EQUIVALENCE block, it returns 'true' (as we have shared=false), as only
8167 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
8168 the whole block. For C++ and Fortran, it can also be true under certain
8169 other conditions, if DECL_HAS_VALUE_EXPR. */
8170 if (RECORD_OR_UNION_TYPE_P (type))
8171 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
8173 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
8174 && is_global_var (decl)
8175 && device_resident_p (decl)
8176 && !is_private)
8178 on_device = true;
8179 flags |= GOVD_MAP_TO_ONLY;
8182 switch (ctx->region_type)
8184 case ORT_ACC_KERNELS:
8185 if (is_private)
8186 flags |= GOVD_FIRSTPRIVATE;
8187 else if (AGGREGATE_TYPE_P (type))
8189 /* Aggregates default to 'present_or_copy', or 'present'. */
8190 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8191 flags |= GOVD_MAP;
8192 else
8193 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8195 else
8196 /* Scalars default to 'copy'. */
8197 flags |= GOVD_MAP | GOVD_MAP_FORCE;
8199 break;
8201 case ORT_ACC_PARALLEL:
8202 case ORT_ACC_SERIAL:
8203 if (is_private)
8204 flags |= GOVD_FIRSTPRIVATE;
8205 else if (on_device || declared)
8206 flags |= GOVD_MAP;
8207 else if (AGGREGATE_TYPE_P (type))
8209 /* Aggregates default to 'present_or_copy', or 'present'. */
8210 if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
8211 flags |= GOVD_MAP;
8212 else
8213 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
8215 else
8216 /* Scalars default to 'firstprivate'. */
8217 flags |= GOVD_FIRSTPRIVATE;
8219 break;
8221 default:
8222 gcc_unreachable ();
8225 if (DECL_ARTIFICIAL (decl))
8226 ; /* We can get compiler-generated decls, and should not complain
8227 about them. */
8228 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
8230 error ("%qE not specified in enclosing OpenACC %qs construct",
8231 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
8232 oacc_region_type_name (ctx->region_type));
8233 if (ctx_default != ctx)
8234 inform (ctx->location, "enclosing OpenACC %qs construct and",
8235 oacc_region_type_name (ctx->region_type));
8236 inform (ctx_default->location,
8237 "enclosing OpenACC %qs construct with %qs clause",
8238 oacc_region_type_name (ctx_default->region_type),
8239 "default(none)");
8241 else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
8242 ; /* Handled above. */
8243 else
8244 gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
8246 return flags;
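/* Sketch of the OpenACC defaults above (illustrative): in
     int s;
     int a[100];
     #pragma acc parallel
     { ... s ... a ... }
   the scalar 's' defaults to firstprivate while the aggregate 'a'
   defaults to a present_or_copy map; under default(present) the map
   for 'a' instead requires it to already be present on the device.  */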
8249 /* Record the fact that DECL was used within the OMP context CTX.
8250 IN_CODE is true when real code uses DECL, and false when we should
8251 merely emit default(none) errors. Return true if DECL is going to
8252 be remapped and thus DECL shouldn't be gimplified into its
8253 DECL_VALUE_EXPR (if any). */
8255 static bool
8256 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
8258 splay_tree_node n;
8259 unsigned flags = in_code ? GOVD_SEEN : 0;
8260 bool ret = false, shared;
8262 if (error_operand_p (decl))
8263 return false;
8265 if (DECL_ARTIFICIAL (decl))
8267 tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
8268 if (attr)
8269 decl = TREE_VALUE (TREE_VALUE (attr));
8272 if (ctx->region_type == ORT_NONE)
8273 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
8275 if (is_global_var (decl))
8277 /* Threadprivate variables are predetermined. */
8278 if (DECL_THREAD_LOCAL_P (decl))
8279 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
8281 if (DECL_HAS_VALUE_EXPR_P (decl))
8283 if (ctx->region_type & ORT_ACC)
8284 /* For OpenACC, defer expansion of the value to avoid transferring
8285 the privatized common block data instead of the im-/explicitly
8286 transferred variables which are in common blocks. */
8288 else
8290 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8292 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
8293 return omp_notice_threadprivate_variable (ctx, decl, value);
8297 if (gimplify_omp_ctxp->outer_context == NULL
8298 && VAR_P (decl)
8299 && oacc_get_fn_attrib (current_function_decl))
8301 location_t loc = DECL_SOURCE_LOCATION (decl);
8303 if (lookup_attribute ("omp declare target link",
8304 DECL_ATTRIBUTES (decl)))
8306 error_at (loc,
8307 "%qE with %<link%> clause used in %<routine%> function",
8308 DECL_NAME (decl));
8309 return false;
8311 else if (!lookup_attribute ("omp declare target",
8312 DECL_ATTRIBUTES (decl)))
8314 error_at (loc,
8315 "%qE requires a %<declare%> directive for use "
8316 "in a %<routine%> function", DECL_NAME (decl));
8317 return false;
8322 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8323 if ((ctx->region_type & ORT_TARGET) != 0)
8325 if (n == NULL)
8327 unsigned nflags = flags;
8328 if ((ctx->region_type & ORT_ACC) == 0)
8330 bool is_declare_target = false;
8331 if (is_global_var (decl)
8332 && varpool_node::get_create (decl)->offloadable)
8334 struct gimplify_omp_ctx *octx;
8335 for (octx = ctx->outer_context;
8336 octx; octx = octx->outer_context)
8338 n = splay_tree_lookup (octx->variables,
8339 (splay_tree_key)decl);
8340 if (n
8341 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
8342 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8343 break;
8345 is_declare_target = octx == NULL;
8347 if (!is_declare_target)
8349 int gdmk;
8350 enum omp_clause_defaultmap_kind kind;
8351 if (lang_hooks.decls.omp_allocatable_p (decl))
8352 gdmk = GDMK_ALLOCATABLE;
8353 else if (lang_hooks.decls.omp_scalar_target_p (decl))
8354 gdmk = GDMK_SCALAR_TARGET;
8355 else if (lang_hooks.decls.omp_scalar_p (decl, false))
8356 gdmk = GDMK_SCALAR;
8357 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
8358 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8359 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
8360 == POINTER_TYPE)))
8361 gdmk = GDMK_POINTER;
8362 else
8363 gdmk = GDMK_AGGREGATE;
8364 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
8365 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
8367 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
8368 nflags |= GOVD_FIRSTPRIVATE;
8369 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
8370 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
8371 else
8372 gcc_unreachable ();
8374 else if (ctx->defaultmap[gdmk] == 0)
8376 tree d = lang_hooks.decls.omp_report_decl (decl);
8377 error ("%qE not specified in enclosing %<target%>",
8378 DECL_NAME (d));
8379 inform (ctx->location, "enclosing %<target%>");
8381 else if (ctx->defaultmap[gdmk]
8382 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
8383 nflags |= ctx->defaultmap[gdmk];
8384 else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
8386 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8387 nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
8389 else
8391 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
8392 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
8397 struct gimplify_omp_ctx *octx = ctx->outer_context;
8398 if ((ctx->region_type & ORT_ACC) && octx)
8400 /* Look in outer OpenACC contexts, to see if there's a
8401 data attribute for this variable. */
8402 omp_notice_variable (octx, decl, in_code);
8404 for (; octx; octx = octx->outer_context)
8406 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
8407 break;
8408 splay_tree_node n2
8409 = splay_tree_lookup (octx->variables,
8410 (splay_tree_key) decl);
8411 if (n2)
8413 if (octx->region_type == ORT_ACC_HOST_DATA)
8414 error ("variable %qE declared in enclosing "
8415 "%<host_data%> region", DECL_NAME (decl));
8416 nflags |= GOVD_MAP;
8417 if (octx->region_type == ORT_ACC_DATA
8418 && (n2->value & GOVD_MAP_0LEN_ARRAY))
8419 nflags |= GOVD_MAP_0LEN_ARRAY;
8420 goto found_outer;
8425 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
8426 | GOVD_MAP_ALLOC_ONLY)) == flags)
8428 tree type = TREE_TYPE (decl);
8430 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8431 && omp_privatize_by_reference (decl))
8432 type = TREE_TYPE (type);
8433 if (!omp_mappable_type (type))
8435 error ("%qD referenced in target region does not have "
8436 "a mappable type", decl);
8437 nflags |= GOVD_MAP | GOVD_EXPLICIT;
8439 else
8441 if ((ctx->region_type & ORT_ACC) != 0)
8442 nflags = oacc_default_clause (ctx, decl, flags);
8443 else
8444 nflags |= GOVD_MAP;
8447 found_outer:
8448 omp_add_variable (ctx, decl, nflags);
8449 if (ctx->region_type & ORT_ACC)
8450 /* For OpenACC, as remarked above, defer expansion. */
8451 shared = false;
8452 else
8453 shared = (nflags & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
8454 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8456 else
8458 if (ctx->region_type & ORT_ACC)
8459 /* For OpenACC, as remarked above, defer expansion. */
8460 shared = false;
8461 else
8462 shared = ((n->value | flags)
8463 & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
8464 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8465 /* If nothing changed, there's nothing left to do. */
8466 if ((n->value & flags) == flags)
8467 return ret;
8468 flags |= n->value;
8469 n->value = flags;
8471 goto do_outer;
8474 if (n == NULL)
8476 if (ctx->region_type == ORT_WORKSHARE
8477 || ctx->region_type == ORT_TASKGROUP
8478 || ctx->region_type == ORT_SIMD
8479 || ctx->region_type == ORT_ACC
8480 || (ctx->region_type & ORT_TARGET_DATA) != 0)
8481 goto do_outer;
8483 flags = omp_default_clause (ctx, decl, in_code, flags);
8485 if ((flags & GOVD_PRIVATE)
8486 && lang_hooks.decls.omp_private_outer_ref (decl))
8487 flags |= GOVD_PRIVATE_OUTER_REF;
8489 omp_add_variable (ctx, decl, flags);
8491 shared = (flags & GOVD_SHARED) != 0;
8492 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8493 goto do_outer;
8496 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8497 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
8498 if (ctx->region_type == ORT_SIMD
8499 && ctx->in_for_exprs
8500 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8501 == GOVD_PRIVATE))
8502 flags &= ~GOVD_SEEN;
8504 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8505 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8506 && DECL_SIZE (decl))
8508 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8510 splay_tree_node n2;
8511 tree t = DECL_VALUE_EXPR (decl);
8512 gcc_assert (INDIRECT_REF_P (t));
8513 t = TREE_OPERAND (t, 0);
8514 gcc_assert (DECL_P (t));
8515 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8516 n2->value |= GOVD_SEEN;
8518 else if (omp_privatize_by_reference (decl)
8519 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8520 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8521 != INTEGER_CST))
8523 splay_tree_node n2;
8524 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8525 gcc_assert (DECL_P (t));
8526 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8527 if (n2)
8528 omp_notice_variable (ctx, t, true);
8532 if (ctx->region_type & ORT_ACC)
8533 /* For OpenACC, as remarked above, defer expansion. */
8534 shared = false;
8535 else
8536 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8537 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8539 /* If nothing changed, there's nothing left to do. */
8540 if ((n->value & flags) == flags)
8541 return ret;
8542 flags |= n->value;
8543 n->value = flags;
8545 do_outer:
8546 /* If the variable is private in the current context, then we don't
8547 need to propagate anything to an outer context. */
8548 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8549 return ret;
8550 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8551 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8552 return ret;
8553 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8554 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8555 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8556 return ret;
8557 if (ctx->outer_context
8558 && omp_notice_variable (ctx->outer_context, decl, in_code))
8559 return true;
8560 return ret;
8563 /* Verify that DECL is private within CTX. If there's specific information
8564 to the contrary in the innermost scope, generate an error. */
8566 static bool
8567 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8569 splay_tree_node n;
8571 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8572 if (n != NULL)
8574 if (n->value & GOVD_SHARED)
8576 if (ctx == gimplify_omp_ctxp)
8578 if (simd)
8579 error ("iteration variable %qE is predetermined linear",
8580 DECL_NAME (decl));
8581 else
8582 error ("iteration variable %qE should be private",
8583 DECL_NAME (decl));
8584 n->value = GOVD_PRIVATE;
8585 return true;
8587 else
8588 return false;
8590 else if ((n->value & GOVD_EXPLICIT) != 0
8591 && (ctx == gimplify_omp_ctxp
8592 || (ctx->region_type == ORT_COMBINED_PARALLEL
8593 && gimplify_omp_ctxp->outer_context == ctx)))
8595 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8596 error ("iteration variable %qE should not be firstprivate",
8597 DECL_NAME (decl));
8598 else if ((n->value & GOVD_REDUCTION) != 0)
8599 error ("iteration variable %qE should not be reduction",
8600 DECL_NAME (decl));
8601 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8602 error ("iteration variable %qE should not be linear",
8603 DECL_NAME (decl));
8605 return (ctx == gimplify_omp_ctxp
8606 || (ctx->region_type == ORT_COMBINED_PARALLEL
8607 && gimplify_omp_ctxp->outer_context == ctx));
8610 if (ctx->region_type != ORT_WORKSHARE
8611 && ctx->region_type != ORT_TASKGROUP
8612 && ctx->region_type != ORT_SIMD
8613 && ctx->region_type != ORT_ACC)
8614 return false;
8615 else if (ctx->outer_context)
8616 return omp_is_private (ctx->outer_context, decl, simd);
8617 return false;
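/* Illustrative trigger (assumed C input):
     int i;
     #pragma omp for firstprivate (i)
     for (i = 0; i < 8; i++)
       ;
   reports "iteration variable 'i' should not be firstprivate" via the
   GOVD_EXPLICIT checks above.  */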
8620 /* Return true if DECL is private within a parallel region
8621 that binds to the current construct's context, or appears in that
8622 parallel region's REDUCTION clause. */
8624 static bool
8625 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8627 splay_tree_node n;
8631 ctx = ctx->outer_context;
8632 if (ctx == NULL)
8634 if (is_global_var (decl))
8635 return false;
8637 /* References might be private, but they might be shared too.
8638 When checking for copyprivate, assume they might be
8639 private; otherwise assume they might be shared. */
8640 if (copyprivate)
8641 return true;
8643 if (omp_privatize_by_reference (decl))
8644 return false;
8646 /* Treat C++ privatized non-static data members outside
8647 of the privatization the same. */
8648 if (omp_member_access_dummy_var (decl))
8649 return false;
8651 return true;
8654 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8656 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8657 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8659 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8660 || n == NULL
8661 || (n->value & GOVD_MAP) == 0)
8662 continue;
8663 return false;
8666 if (n != NULL)
8668 if ((n->value & GOVD_LOCAL) != 0
8669 && omp_member_access_dummy_var (decl))
8670 return false;
8671 return (n->value & GOVD_SHARED) == 0;
8674 if (ctx->region_type == ORT_WORKSHARE
8675 || ctx->region_type == ORT_TASKGROUP
8676 || ctx->region_type == ORT_SIMD
8677 || ctx->region_type == ORT_ACC)
8678 continue;
8680 break;
8682 while (1);
8683 return false;
8686 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8688 static tree
8689 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8691 tree t = *tp;
8693 /* Stop the walk when the DECL_EXPR for DECL is found. */
8694 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8695 return t;
8697 if (IS_TYPE_OR_DECL_P (t))
8698 *walk_subtrees = 0;
8699 return NULL_TREE;
8703 /* Gimplify the affinity clause but effectively ignore it.
8704 Generate:
8705 var = begin;
8706 if ((step > 1) ? var <= end : var > end)
8707 locatator_var_expr; */
8709 static void
8710 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8712 tree last_iter = NULL_TREE;
8713 tree last_bind = NULL_TREE;
8714 tree label = NULL_TREE;
8715 tree *last_body = NULL;
8716 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8717 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8719 tree t = OMP_CLAUSE_DECL (c);
8720 if (TREE_CODE (t) == TREE_LIST
8721 && TREE_PURPOSE (t)
8722 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8724 if (TREE_VALUE (t) == null_pointer_node)
8725 continue;
8726 if (TREE_PURPOSE (t) != last_iter)
8728 if (last_bind)
8730 append_to_statement_list (label, last_body);
8731 gimplify_and_add (last_bind, pre_p);
8732 last_bind = NULL_TREE;
8734 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8736 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8737 is_gimple_val, fb_rvalue) == GS_ERROR
8738 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8739 is_gimple_val, fb_rvalue) == GS_ERROR
8740 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8741 is_gimple_val, fb_rvalue) == GS_ERROR
8742 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8743 is_gimple_val, fb_rvalue)
8744 == GS_ERROR))
8745 return;
8747 last_iter = TREE_PURPOSE (t);
8748 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8749 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8750 NULL, block);
8751 last_body = &BIND_EXPR_BODY (last_bind);
8752 tree cond = NULL_TREE;
8753 location_t loc = OMP_CLAUSE_LOCATION (c);
8754 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8756 tree var = TREE_VEC_ELT (it, 0);
8757 tree begin = TREE_VEC_ELT (it, 1);
8758 tree end = TREE_VEC_ELT (it, 2);
8759 tree step = TREE_VEC_ELT (it, 3);
8760 loc = DECL_SOURCE_LOCATION (var);
8761 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8762 var, begin);
8763 append_to_statement_list_force (tem, last_body);
8765 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8766 step, build_zero_cst (TREE_TYPE (step)));
8767 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8768 var, end);
8769 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8770 var, end);
8771 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8772 cond1, cond2, cond3);
8773 if (cond)
8774 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8775 boolean_type_node, cond, cond1);
8776 else
8777 cond = cond1;
8779 tree cont_label = create_artificial_label (loc);
8780 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8781 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8782 void_node,
8783 build_and_jump (&cont_label));
8784 append_to_statement_list_force (tem, last_body);
8786 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8788 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8789 last_body);
8790 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8792 if (error_operand_p (TREE_VALUE (t)))
8793 return;
8794 append_to_statement_list_force (TREE_VALUE (t), last_body);
8795 TREE_VALUE (t) = null_pointer_node;
8797 else
8799 if (last_bind)
8801 append_to_statement_list (label, last_body);
8802 gimplify_and_add (last_bind, pre_p);
8803 last_bind = NULL_TREE;
8805 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8807 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8808 NULL, is_gimple_val, fb_rvalue);
8809 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8811 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8812 return;
8813 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8814 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8815 return;
8816 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8819 if (last_bind)
8821 append_to_statement_list (label, last_body);
8822 gimplify_and_add (last_bind, pre_p);
8824 return;
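/* Illustrative input (assumed): a clause such as
     #pragma omp task affinity (iterator (i = 0 : n) : a[i])
   arrives here with the iterator TREE_VEC in TREE_PURPOSE; the code
   above materializes the iterator loop, evaluates a[i] only for its
   side effects, and then drops the affinity hint, as the function
   comment states.  */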
8827 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8828 lower all the depend clauses by populating the corresponding depend
8829 array. Return 0 if there are no such depend clauses, 2 if all
8830 depend clauses should be removed, and 1 otherwise. */
8832 static int
8833 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8835 tree c;
8836 gimple *g;
8837 size_t n[5] = { 0, 0, 0, 0, 0 };
8838 bool unused[5];
8839 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8840 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8841 size_t i, j;
8842 location_t first_loc = UNKNOWN_LOCATION;
8844 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8845 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8847 switch (OMP_CLAUSE_DEPEND_KIND (c))
8849 case OMP_CLAUSE_DEPEND_IN:
8850 i = 2;
8851 break;
8852 case OMP_CLAUSE_DEPEND_OUT:
8853 case OMP_CLAUSE_DEPEND_INOUT:
8854 i = 0;
8855 break;
8856 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8857 i = 1;
8858 break;
8859 case OMP_CLAUSE_DEPEND_DEPOBJ:
8860 i = 3;
8861 break;
8862 case OMP_CLAUSE_DEPEND_INOUTSET:
8863 i = 4;
8864 break;
8865 default:
8866 gcc_unreachable ();
8868 tree t = OMP_CLAUSE_DECL (c);
8869 if (first_loc == UNKNOWN_LOCATION)
8870 first_loc = OMP_CLAUSE_LOCATION (c);
8871 if (TREE_CODE (t) == TREE_LIST
8872 && TREE_PURPOSE (t)
8873 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8875 if (TREE_PURPOSE (t) != last_iter)
8877 tree tcnt = size_one_node;
8878 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8880 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8881 is_gimple_val, fb_rvalue) == GS_ERROR
8882 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8883 is_gimple_val, fb_rvalue) == GS_ERROR
8884 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8885 is_gimple_val, fb_rvalue) == GS_ERROR
8886 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8887 is_gimple_val, fb_rvalue)
8888 == GS_ERROR))
8889 return 2;
8890 tree var = TREE_VEC_ELT (it, 0);
8891 tree begin = TREE_VEC_ELT (it, 1);
8892 tree end = TREE_VEC_ELT (it, 2);
8893 tree step = TREE_VEC_ELT (it, 3);
8894 tree orig_step = TREE_VEC_ELT (it, 4);
8895 tree type = TREE_TYPE (var);
8896 tree stype = TREE_TYPE (step);
8897 location_t loc = DECL_SOURCE_LOCATION (var);
8898 tree endmbegin;
8899 /* Compute count for this iterator as
8900 orig_step > 0
8901 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8902 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8903 and compute product of those for the entire depend
8904 clause. */
8905 if (POINTER_TYPE_P (type))
8906 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8907 stype, end, begin);
8908 else
8909 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8910 end, begin);
8911 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8912 step,
8913 build_int_cst (stype, 1));
8914 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8915 build_int_cst (stype, 1));
8916 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8917 unshare_expr (endmbegin),
8918 stepm1);
8919 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8920 pos, step);
8921 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8922 endmbegin, stepp1);
8923 if (TYPE_UNSIGNED (stype))
8925 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8926 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8928 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8929 neg, step);
8930 step = NULL_TREE;
8931 tree cond = fold_build2_loc (loc, LT_EXPR,
8932 boolean_type_node,
8933 begin, end);
8934 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8935 build_int_cst (stype, 0));
8936 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8937 end, begin);
8938 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8939 build_int_cst (stype, 0));
8940 tree osteptype = TREE_TYPE (orig_step);
8941 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8942 orig_step,
8943 build_int_cst (osteptype, 0));
8944 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8945 cond, pos, neg);
8946 cnt = fold_convert_loc (loc, sizetype, cnt);
8947 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8948 fb_rvalue) == GS_ERROR)
8949 return 2;
8950 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8952 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8953 fb_rvalue) == GS_ERROR)
8954 return 2;
8955 last_iter = TREE_PURPOSE (t);
8956 last_count = tcnt;
8958 if (counts[i] == NULL_TREE)
8959 counts[i] = last_count;
8960 else
8961 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8962 PLUS_EXPR, counts[i], last_count);
8964 else
8965 n[i]++;
8967 for (i = 0; i < 5; i++)
8968 if (counts[i])
8969 break;
8970 if (i == 5)
8971 return 0;
8973 tree total = size_zero_node;
8974 for (i = 0; i < 5; i++)
8976 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8977 if (counts[i] == NULL_TREE)
8978 counts[i] = size_zero_node;
8979 if (n[i])
8980 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8981 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8982 fb_rvalue) == GS_ERROR)
8983 return 2;
8984 total = size_binop (PLUS_EXPR, total, counts[i]);
8987 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8988 == GS_ERROR)
8989 return 2;
8990 bool is_old = unused[1] && unused[3] && unused[4];
8991 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8992 size_int (is_old ? 1 : 4));
8993 if (!unused[4])
8994 totalpx = size_binop (PLUS_EXPR, totalpx,
8995 size_binop (MULT_EXPR, counts[4], size_int (2)));
8996 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8997 tree array = create_tmp_var_raw (type);
8998 TREE_ADDRESSABLE (array) = 1;
8999 if (!poly_int_tree_p (totalpx))
9001 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
9002 gimplify_type_sizes (TREE_TYPE (array), pre_p);
9003 if (gimplify_omp_ctxp)
9005 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9006 while (ctx
9007 && (ctx->region_type == ORT_WORKSHARE
9008 || ctx->region_type == ORT_TASKGROUP
9009 || ctx->region_type == ORT_SIMD
9010 || ctx->region_type == ORT_ACC))
9011 ctx = ctx->outer_context;
9012 if (ctx)
9013 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
9015 gimplify_vla_decl (array, pre_p);
9017 else
9018 gimple_add_tmp_var (array);
9019 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9020 NULL_TREE);
9021 tree tem;
9022 if (!is_old)
9024 tem = build2 (MODIFY_EXPR, void_type_node, r,
9025 build_int_cst (ptr_type_node, 0));
9026 gimplify_and_add (tem, pre_p);
9027 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9028 NULL_TREE);
9030 tem = build2 (MODIFY_EXPR, void_type_node, r,
9031 fold_convert (ptr_type_node, total));
9032 gimplify_and_add (tem, pre_p);
9033 for (i = 1; i < (is_old ? 2 : 4); i++)
9035 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
9036 NULL_TREE, NULL_TREE);
9037 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
9038 gimplify_and_add (tem, pre_p);
9041 tree cnts[6];
9042 for (j = 5; j; j--)
9043 if (!unused[j - 1])
9044 break;
9045 for (i = 0; i < 5; i++)
9047 if (i && (i >= j || unused[i - 1]))
9049 cnts[i] = cnts[i - 1];
9050 continue;
9052 cnts[i] = create_tmp_var (sizetype);
9053 if (i == 0)
9054 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
9055 else
9057 tree t;
9058 if (is_old)
9059 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
9060 else
9061 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
9062 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
9063 == GS_ERROR)
9064 return 2;
9065 g = gimple_build_assign (cnts[i], t);
9067 gimple_seq_add_stmt (pre_p, g);
9069 if (unused[4])
9070 cnts[5] = NULL_TREE;
9071 else
9073 tree t = size_binop (PLUS_EXPR, total, size_int (5));
9074 cnts[5] = create_tmp_var (sizetype);
9075 g = gimple_build_assign (cnts[5], t);
9076 gimple_seq_add_stmt (pre_p, g);
9079 last_iter = NULL_TREE;
9080 tree last_bind = NULL_TREE;
9081 tree *last_body = NULL;
9082 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
9083 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9085 switch (OMP_CLAUSE_DEPEND_KIND (c))
9087 case OMP_CLAUSE_DEPEND_IN:
9088 i = 2;
9089 break;
9090 case OMP_CLAUSE_DEPEND_OUT:
9091 case OMP_CLAUSE_DEPEND_INOUT:
9092 i = 0;
9093 break;
9094 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
9095 i = 1;
9096 break;
9097 case OMP_CLAUSE_DEPEND_DEPOBJ:
9098 i = 3;
9099 break;
9100 case OMP_CLAUSE_DEPEND_INOUTSET:
9101 i = 4;
9102 break;
9103 default:
9104 gcc_unreachable ();
9106 tree t = OMP_CLAUSE_DECL (c);
9107 if (TREE_CODE (t) == TREE_LIST
9108 && TREE_PURPOSE (t)
9109 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
9111 if (TREE_PURPOSE (t) != last_iter)
9113 if (last_bind)
9114 gimplify_and_add (last_bind, pre_p);
9115 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
9116 last_bind = build3 (BIND_EXPR, void_type_node,
9117 BLOCK_VARS (block), NULL, block);
9118 TREE_SIDE_EFFECTS (last_bind) = 1;
9119 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
9120 tree *p = &BIND_EXPR_BODY (last_bind);
9121 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
9123 tree var = TREE_VEC_ELT (it, 0);
9124 tree begin = TREE_VEC_ELT (it, 1);
9125 tree end = TREE_VEC_ELT (it, 2);
9126 tree step = TREE_VEC_ELT (it, 3);
9127 tree orig_step = TREE_VEC_ELT (it, 4);
9128 tree type = TREE_TYPE (var);
9129 location_t loc = DECL_SOURCE_LOCATION (var);
9130 /* Emit:
9131 var = begin;
9132 goto cond_label;
9133 beg_label:
9134 ...
9135 var = var + step;
9136 cond_label:
9137 if (orig_step > 0) {
9138 if (var < end) goto beg_label;
9139 } else {
9140 if (var > end) goto beg_label;
9141 }
9142 for each iterator, with inner iterators added to
9143 the ... above. */
9144 tree beg_label = create_artificial_label (loc);
9145 tree cond_label = NULL_TREE;
9146 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
9147 var, begin);
9148 append_to_statement_list_force (tem, p);
9149 tem = build_and_jump (&cond_label);
9150 append_to_statement_list_force (tem, p);
9151 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
9152 append_to_statement_list (tem, p);
9153 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
9154 NULL_TREE, NULL_TREE);
9155 TREE_SIDE_EFFECTS (bind) = 1;
9156 SET_EXPR_LOCATION (bind, loc);
9157 append_to_statement_list_force (bind, p);
9158 if (POINTER_TYPE_P (type))
9159 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
9160 var, fold_convert_loc (loc, sizetype,
9161 step));
9162 else
9163 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
9164 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
9165 var, tem);
9166 append_to_statement_list_force (tem, p);
9167 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
9168 append_to_statement_list (tem, p);
9169 tree cond = fold_build2_loc (loc, LT_EXPR,
9170 boolean_type_node,
9171 var, end);
9172 tree pos
9173 = fold_build3_loc (loc, COND_EXPR, void_type_node,
9174 cond, build_and_jump (&beg_label),
9175 void_node);
9176 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
9177 var, end);
9178 tree neg
9179 = fold_build3_loc (loc, COND_EXPR, void_type_node,
9180 cond, build_and_jump (&beg_label),
9181 void_node);
9182 tree osteptype = TREE_TYPE (orig_step);
9183 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
9184 orig_step,
9185 build_int_cst (osteptype, 0));
9186 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
9187 cond, pos, neg);
9188 append_to_statement_list_force (tem, p);
9189 p = &BIND_EXPR_BODY (bind);
9191 last_body = p;
9193 last_iter = TREE_PURPOSE (t);
9194 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
9196 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
9197 0), last_body);
9198 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
9200 if (error_operand_p (TREE_VALUE (t)))
9201 return 2;
9202 if (TREE_VALUE (t) != null_pointer_node)
9203 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
9204 if (i == 4)
9206 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9207 NULL_TREE, NULL_TREE);
9208 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9209 NULL_TREE, NULL_TREE);
9210 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9211 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9212 void_type_node, r, r2);
9213 append_to_statement_list_force (tem, last_body);
9214 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9215 void_type_node, cnts[i],
9216 size_binop (PLUS_EXPR, cnts[i],
9217 size_int (1)));
9218 append_to_statement_list_force (tem, last_body);
9219 i = 5;
9221 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9222 NULL_TREE, NULL_TREE);
9223 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9224 void_type_node, r, TREE_VALUE (t));
9225 append_to_statement_list_force (tem, last_body);
9226 if (i == 5)
9228 r = build4 (ARRAY_REF, ptr_type_node, array,
9229 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9230 NULL_TREE, NULL_TREE);
9231 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9232 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9233 void_type_node, r, tem);
9234 append_to_statement_list_force (tem, last_body);
9236 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9237 void_type_node, cnts[i],
9238 size_binop (PLUS_EXPR, cnts[i],
9239 size_int (1 + (i == 5))));
9240 append_to_statement_list_force (tem, last_body);
9241 TREE_VALUE (t) = null_pointer_node;
9243 else
9245 if (last_bind)
9247 gimplify_and_add (last_bind, pre_p);
9248 last_bind = NULL_TREE;
9250 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9252 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9253 NULL, is_gimple_val, fb_rvalue);
9254 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9256 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9257 return 2;
9258 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
9259 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9260 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9261 is_gimple_val, fb_rvalue) == GS_ERROR)
9262 return 2;
9263 if (i == 4)
9265 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9266 NULL_TREE, NULL_TREE);
9267 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9268 NULL_TREE, NULL_TREE);
9269 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9270 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
9271 gimplify_and_add (tem, pre_p);
9272 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
9273 cnts[i],
9274 size_int (1)));
9275 gimple_seq_add_stmt (pre_p, g);
9276 i = 5;
9278 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9279 NULL_TREE, NULL_TREE);
9280 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
9281 gimplify_and_add (tem, pre_p);
9282 if (i == 5)
9284 r = build4 (ARRAY_REF, ptr_type_node, array,
9285 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9286 NULL_TREE, NULL_TREE);
9287 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9288 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
9290 gimplify_and_add (tem, pre_p);
9292 g = gimple_build_assign (cnts[i],
9293 size_binop (PLUS_EXPR, cnts[i],
9294 size_int (1 + (i == 5))));
9295 gimple_seq_add_stmt (pre_p, g);
9298 if (last_bind)
9299 gimplify_and_add (last_bind, pre_p);
9300 tree cond = boolean_false_node;
9301 if (is_old)
9303 if (!unused[0])
9304 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
9305 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
9306 size_int (2)));
9307 if (!unused[2])
9308 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9309 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9310 cnts[2],
9311 size_binop_loc (first_loc, PLUS_EXPR,
9312 totalpx,
9313 size_int (1))));
9315 else
9317 tree prev = size_int (5);
9318 for (i = 0; i < 5; i++)
9320 if (unused[i])
9321 continue;
9322 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
9323 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9324 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9325 cnts[i], unshare_expr (prev)));
9328 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
9329 build_call_expr_loc (first_loc,
9330 builtin_decl_explicit (BUILT_IN_TRAP),
9331 0), void_node);
9332 gimplify_and_add (tem, pre_p);
9333 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9334 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
9335 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9336 OMP_CLAUSE_CHAIN (c) = *list_p;
9337 *list_p = c;
9338 return 1;
9341 /* True if mapping node C maps, or unmaps, a (Fortran) array descriptor. */
9343 static bool
9344 omp_map_clause_descriptor_p (tree c)
9346 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
9347 return false;
9349 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
9350 return true;
9352 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
9353 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE)
9354 && OMP_CLAUSE_RELEASE_DESCRIPTOR (c))
9355 return true;
9357 return false;
9360 /* For a set of mappings describing an array section pointed to by a struct
9361 (or derived type, etc.) component, create an "alloc" or "release" node to
9362 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9363 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9364 be created that is inserted into the list of mapping nodes attached to the
9365 directive being processed -- not part of the sorted list of nodes after
9366 GOMP_MAP_STRUCT.
9368 CODE is the code of the directive being processed. GRP_START and GRP_END
9369 are the first and last of two or three nodes representing this array section
9370 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9371 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9372 filled with the additional node described above, if needed.
9374 This function does not add the new nodes to any lists itself. It is the
9375 responsibility of the caller to do that. */
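/* An illustrative sketch, assuming the C front end's usual expansion of a
   pointer-based array section: for

     #pragma omp target enter data map(to: s.ptr[0:n])

   the group is roughly { GOMP_MAP_TO *s.ptr [len n*elemsize],
   GOMP_MAP_ATTACH_DETACH s.ptr }, and this function returns a
   GOMP_MAP_ALLOC node for the "s.ptr" component (pointer-sized, or
   descriptor-sized for Fortran) to place after the GOMP_MAP_STRUCT node;
   on "exit data" directives GOMP_MAP_RELEASE is used instead.  */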
9377 static tree
9378 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
9379 tree *extra_node)
9381 enum gomp_map_kind mkind
9382 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
9383 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
9385 gcc_assert (grp_start != grp_end);
9387 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9388 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9389 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
9390 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
9391 tree grp_mid = NULL_TREE;
9392 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
9393 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
9395 if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
9396 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
9397 else
9398 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
9400 if (grp_mid
9401 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9402 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER)
9404 tree c3
9405 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9406 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
9407 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
9408 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
9409 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
9411 *extra_node = c3;
9413 else
9414 *extra_node = NULL_TREE;
9416 return c2;
9419 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
9420 and set *BITPOSP and *POFFSETP to the bit offset of the access.
9421 Set *VARIABLE_OFFSET to TRUE if the access involves a variable (i.e.
9422 non-constant) offset, which is then not included in *POFFSETP. */
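/* E.g. for BASE = s.a[3].b with a constant layout, this should return "s"
   and set *POFFSETP to the byte offset of a[3].b within "s" (a sketch based
   on get_inner_reference's behavior; see that function for the details).  */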
9426 static tree
9427 extract_base_bit_offset (tree base, poly_int64 *bitposp,
9428 poly_offset_int *poffsetp,
9429 bool *variable_offset)
9431 tree offset;
9432 poly_int64 bitsize, bitpos;
9433 machine_mode mode;
9434 int unsignedp, reversep, volatilep = 0;
9435 poly_offset_int poffset;
9437 STRIP_NOPS (base);
9439 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
9440 &unsignedp, &reversep, &volatilep);
9442 STRIP_NOPS (base);
9444 if (offset && poly_int_tree_p (offset))
9446 poffset = wi::to_poly_offset (offset);
9447 *variable_offset = false;
9449 else
9451 poffset = 0;
9452 *variable_offset = (offset != NULL_TREE);
9455 if (maybe_ne (bitpos, 0))
9456 poffset += bits_to_bytes_round_down (bitpos);
9458 *bitposp = bitpos;
9459 *poffsetp = poffset;
9461 return base;
9464 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
9465 started processing the group yet. The TEMPORARY mark is used when we first
9466 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9467 when we have processed all the group's children (i.e. all the base pointers
9468 referred to by the group's mapping nodes, recursively). */
9470 enum omp_tsort_mark {
9471 UNVISITED,
9472 TEMPORARY,
9473 PERMANENT
9476 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
9477 but ignores side effects in the equality comparisons. */
9479 struct tree_operand_hash_no_se : tree_operand_hash
9481 static inline bool equal (const value_type &,
9482 const compare_type &);
9485 inline bool
9486 tree_operand_hash_no_se::equal (const value_type &t1,
9487 const compare_type &t2)
9489 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9492 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9493 clause. */
9495 struct omp_mapping_group {
9496 tree *grp_start;
9497 tree grp_end;
9498 omp_tsort_mark mark;
9499 /* If we've removed the group but need to reindex, mark the group as
9500 deleted. */
9501 bool deleted;
9502 /* The group points to an already-created "GOMP_MAP_STRUCT
9503 GOMP_MAP_ATTACH_DETACH" pair. */
9504 bool reprocess_struct;
9505 /* The group should use "zero-length" allocations for pointers that are not
9506 mapped "to" on the same directive. */
9507 bool fragile;
9508 struct omp_mapping_group *sibling;
9509 struct omp_mapping_group *next;
9512 DEBUG_FUNCTION void
9513 debug_mapping_group (omp_mapping_group *grp)
9515 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9516 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9517 debug_generic_expr (*grp->grp_start);
9518 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9521 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9522 isn't one. */
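/* E.g. for an expression like "p->arr[3].x" (i.e. (*p).arr[3].x) the base
   pointer returned is "p"; for "s.arr[3]" with a non-pointer "s" there is
   none.  (Illustration only; the OpenMP spec defines the term precisely.)  */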
9524 static tree
9525 omp_get_base_pointer (tree expr)
9527 while (TREE_CODE (expr) == ARRAY_REF
9528 || TREE_CODE (expr) == COMPONENT_REF)
9529 expr = TREE_OPERAND (expr, 0);
9531 if (INDIRECT_REF_P (expr)
9532 || (TREE_CODE (expr) == MEM_REF
9533 && integer_zerop (TREE_OPERAND (expr, 1))))
9535 expr = TREE_OPERAND (expr, 0);
9536 while (TREE_CODE (expr) == COMPOUND_EXPR)
9537 expr = TREE_OPERAND (expr, 1);
9538 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9539 expr = TREE_OPERAND (expr, 0);
9540 if (TREE_CODE (expr) == SAVE_EXPR)
9541 expr = TREE_OPERAND (expr, 0);
9542 STRIP_NOPS (expr);
9543 return expr;
9546 return NULL_TREE;
9549 /* An attach or detach operation depends directly on the address being
9550 attached/detached. Return that address, or none if there are no
9551 attachments/detachments. */
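/* E.g. for a group such as { GOMP_MAP_TO *p [0:n], GOMP_MAP_ATTACH_DETACH p }
   this returns "p", the address being attached; a plain "map(to: x)" group
   with no attachment node yields NULL_TREE.  (Illustration only.)  */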
9553 static tree
9554 omp_get_attachment (omp_mapping_group *grp)
9556 tree node = *grp->grp_start;
9558 switch (OMP_CLAUSE_MAP_KIND (node))
9560 case GOMP_MAP_TO:
9561 case GOMP_MAP_FROM:
9562 case GOMP_MAP_TOFROM:
9563 case GOMP_MAP_ALWAYS_FROM:
9564 case GOMP_MAP_ALWAYS_TO:
9565 case GOMP_MAP_ALWAYS_TOFROM:
9566 case GOMP_MAP_FORCE_FROM:
9567 case GOMP_MAP_FORCE_TO:
9568 case GOMP_MAP_FORCE_TOFROM:
9569 case GOMP_MAP_FORCE_PRESENT:
9570 case GOMP_MAP_PRESENT_ALLOC:
9571 case GOMP_MAP_PRESENT_FROM:
9572 case GOMP_MAP_PRESENT_TO:
9573 case GOMP_MAP_PRESENT_TOFROM:
9574 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9575 case GOMP_MAP_ALWAYS_PRESENT_TO:
9576 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9577 case GOMP_MAP_ALLOC:
9578 case GOMP_MAP_RELEASE:
9579 case GOMP_MAP_DELETE:
9580 case GOMP_MAP_FORCE_ALLOC:
9581 if (node == grp->grp_end)
9582 return NULL_TREE;
9584 node = OMP_CLAUSE_CHAIN (node);
9585 if (node && omp_map_clause_descriptor_p (node))
9587 gcc_assert (node != grp->grp_end);
9588 node = OMP_CLAUSE_CHAIN (node);
9590 if (node)
9591 switch (OMP_CLAUSE_MAP_KIND (node))
9593 case GOMP_MAP_POINTER:
9594 case GOMP_MAP_ALWAYS_POINTER:
9595 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9596 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9597 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9598 return NULL_TREE;
9600 case GOMP_MAP_ATTACH_DETACH:
9601 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9602 case GOMP_MAP_DETACH:
9603 return OMP_CLAUSE_DECL (node);
9605 default:
9606 internal_error ("unexpected mapping node");
9608 return error_mark_node;
9610 case GOMP_MAP_TO_PSET:
9611 gcc_assert (node != grp->grp_end);
9612 node = OMP_CLAUSE_CHAIN (node);
9613 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9614 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9615 return OMP_CLAUSE_DECL (node);
9616 else
9617 internal_error ("unexpected mapping node");
9618 return error_mark_node;
9620 case GOMP_MAP_ATTACH:
9621 case GOMP_MAP_DETACH:
9622 node = OMP_CLAUSE_CHAIN (node);
9623 if (!node || *grp->grp_start == grp->grp_end)
9624 return OMP_CLAUSE_DECL (*grp->grp_start);
9625 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9626 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9627 return OMP_CLAUSE_DECL (*grp->grp_start);
9628 else
9629 internal_error ("unexpected mapping node");
9630 return error_mark_node;
9632 case GOMP_MAP_STRUCT:
9633 case GOMP_MAP_STRUCT_UNORD:
9634 case GOMP_MAP_FORCE_DEVICEPTR:
9635 case GOMP_MAP_DEVICE_RESIDENT:
9636 case GOMP_MAP_LINK:
9637 case GOMP_MAP_IF_PRESENT:
9638 case GOMP_MAP_FIRSTPRIVATE:
9639 case GOMP_MAP_FIRSTPRIVATE_INT:
9640 case GOMP_MAP_USE_DEVICE_PTR:
9641 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9642 return NULL_TREE;
9644 default:
9645 internal_error ("unexpected mapping node");
9648 return error_mark_node;
9651 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9652 mappings, return the chain pointer to the end of that group in the list. */
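/* For instance, a C mapping "map(to: p[0:n])" with pointer "p" typically
   arrives as a two-node group such as { GOMP_MAP_TO,
   GOMP_MAP_FIRSTPRIVATE_POINTER } (with a descriptor node in between for
   Fortran), and this function returns the chain pointer addressing the
   group's final node.  Sketch only; the switch below enumerates the exact
   node patterns.  */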
9654 static tree *
9655 omp_group_last (tree *start_p)
9657 tree c = *start_p, nc, *grp_last_p = start_p;
9659 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9661 nc = OMP_CLAUSE_CHAIN (c);
9663 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9664 return grp_last_p;
9666 switch (OMP_CLAUSE_MAP_KIND (c))
9668 default:
9669 while (nc
9670 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9671 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9672 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9673 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9674 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9675 || (OMP_CLAUSE_MAP_KIND (nc)
9676 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9677 || (OMP_CLAUSE_MAP_KIND (nc)
9678 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9679 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH
9680 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9681 || omp_map_clause_descriptor_p (nc)))
9683 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9684 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH)
9686 /* In the specific case we're doing "exit data" on an array
9687 slice of a reference-to-pointer struct component, we will see
9688 DETACH followed by ATTACH_DETACH here. We want to treat that
9689 as a single group. In other cases DETACH might represent a
9690 stand-alone "detach" clause, so we don't want to consider
9691 that part of the group. */
9692 if (nc2
9693 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9694 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH_DETACH)
9695 goto consume_two_nodes;
9696 else
9697 break;
9699 if (nc2
9700 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9701 && (OMP_CLAUSE_MAP_KIND (nc)
9702 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9703 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9705 consume_two_nodes:
9706 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9707 c = nc2;
9708 nc = OMP_CLAUSE_CHAIN (nc2);
9710 else
9712 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9713 c = nc;
9714 nc = nc2;
9717 break;
9719 case GOMP_MAP_ATTACH:
9720 case GOMP_MAP_DETACH:
9721 /* This is a weird artifact of how directives are parsed: bare attach or
9722 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9723 FIRSTPRIVATE_REFERENCE node. FIXME. */
9724 if (nc
9725 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9726 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9727 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9728 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9729 break;
9731 case GOMP_MAP_TO_PSET:
9732 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9733 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9734 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9735 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9736 break;
9738 case GOMP_MAP_STRUCT:
9739 case GOMP_MAP_STRUCT_UNORD:
9741 unsigned HOST_WIDE_INT num_mappings
9742 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9743 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9744 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9745 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9746 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9747 for (unsigned i = 0; i < num_mappings; i++)
9748 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9750 break;
9753 return grp_last_p;
9756 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9757 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9758 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_groups
9759 if we find at least one such group, else return NULL. */
9761 static void
9762 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9763 tree gather_sentinel)
9765 for (tree *cp = list_p;
9766 *cp && *cp != gather_sentinel;
9767 cp = &OMP_CLAUSE_CHAIN (*cp))
9769 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9770 continue;
9772 tree *grp_last_p = omp_group_last (cp);
9773 omp_mapping_group grp;
9775 grp.grp_start = cp;
9776 grp.grp_end = *grp_last_p;
9777 grp.mark = UNVISITED;
9778 grp.sibling = NULL;
9779 grp.deleted = false;
9780 grp.reprocess_struct = false;
9781 grp.fragile = false;
9782 grp.next = NULL;
9783 groups->safe_push (grp);
9785 cp = grp_last_p;
9789 static vec<omp_mapping_group> *
9790 omp_gather_mapping_groups (tree *list_p)
9792 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9794 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9796 if (groups->length () > 0)
9797 return groups;
9798 else
9800 delete groups;
9801 return NULL;
9805 /* A pointer mapping group GRP may define a block of memory starting at some
9806 base address, and maybe also define a firstprivate pointer or firstprivate
9807 reference that points to that block. The return value is a node containing
9808 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9809 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9810 return the number of consecutive chained nodes in CHAINED. */
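/* E.g. for a group { GOMP_MAP_TO *p [0:n], GOMP_MAP_FIRSTPRIVATE_POINTER p }
   this returns the GOMP_MAP_TO node and sets *FIRSTPRIVATE to "p"; for a
   GOMP_MAP_STRUCT group, *CHAINED is set to the number of member mappings.
   (Sketch only; the switch below covers the full set of patterns.)  */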
9812 static tree
9813 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9814 tree *firstprivate)
9816 tree node = *grp->grp_start;
9818 *firstprivate = NULL_TREE;
9819 *chained = 1;
9821 switch (OMP_CLAUSE_MAP_KIND (node))
9823 case GOMP_MAP_TO:
9824 case GOMP_MAP_FROM:
9825 case GOMP_MAP_TOFROM:
9826 case GOMP_MAP_ALWAYS_FROM:
9827 case GOMP_MAP_ALWAYS_TO:
9828 case GOMP_MAP_ALWAYS_TOFROM:
9829 case GOMP_MAP_FORCE_FROM:
9830 case GOMP_MAP_FORCE_TO:
9831 case GOMP_MAP_FORCE_TOFROM:
9832 case GOMP_MAP_FORCE_PRESENT:
9833 case GOMP_MAP_PRESENT_ALLOC:
9834 case GOMP_MAP_PRESENT_FROM:
9835 case GOMP_MAP_PRESENT_TO:
9836 case GOMP_MAP_PRESENT_TOFROM:
9837 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9838 case GOMP_MAP_ALWAYS_PRESENT_TO:
9839 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9840 case GOMP_MAP_ALLOC:
9841 case GOMP_MAP_RELEASE:
9842 case GOMP_MAP_DELETE:
9843 case GOMP_MAP_FORCE_ALLOC:
9844 case GOMP_MAP_IF_PRESENT:
9845 if (node == grp->grp_end)
9846 return node;
9848 node = OMP_CLAUSE_CHAIN (node);
9849 if (!node)
9850 internal_error ("unexpected mapping node");
9851 if (omp_map_clause_descriptor_p (node))
9853 if (node == grp->grp_end)
9854 return *grp->grp_start;
9855 node = OMP_CLAUSE_CHAIN (node);
9857 switch (OMP_CLAUSE_MAP_KIND (node))
9859 case GOMP_MAP_POINTER:
9860 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9861 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9862 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9863 *firstprivate = OMP_CLAUSE_DECL (node);
9864 return *grp->grp_start;
9866 case GOMP_MAP_ALWAYS_POINTER:
9867 case GOMP_MAP_ATTACH_DETACH:
9868 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9869 case GOMP_MAP_DETACH:
9870 return *grp->grp_start;
9872 default:
9873 internal_error ("unexpected mapping node");
9875 return error_mark_node;
9877 case GOMP_MAP_TO_PSET:
9878 gcc_assert (node != grp->grp_end);
9879 node = OMP_CLAUSE_CHAIN (node);
9880 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9881 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9882 return NULL_TREE;
9883 else
9884 internal_error ("unexpected mapping node");
9885 return error_mark_node;
9887 case GOMP_MAP_ATTACH:
9888 case GOMP_MAP_DETACH:
9889 node = OMP_CLAUSE_CHAIN (node);
9890 if (!node || *grp->grp_start == grp->grp_end)
9891 return NULL_TREE;
9892 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9893 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9895 /* We're mapping the base pointer itself in a bare attach or detach
9896 node. This is a side effect of how parsing works, and the mapping
9897 will be removed anyway (at least for enter/exit data directives).
9898 We should ignore the mapping here. FIXME. */
9899 return NULL_TREE;
9901 else
9902 internal_error ("unexpected mapping node");
9903 return error_mark_node;
9905 case GOMP_MAP_STRUCT:
9906 case GOMP_MAP_STRUCT_UNORD:
9908 unsigned HOST_WIDE_INT num_mappings
9909 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9910 node = OMP_CLAUSE_CHAIN (node);
9911 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9912 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9914 *firstprivate = OMP_CLAUSE_DECL (node);
9915 node = OMP_CLAUSE_CHAIN (node);
9917 else if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH_DETACH)
9918 node = OMP_CLAUSE_CHAIN (node);
9919 *chained = num_mappings;
9920 return node;
9923 case GOMP_MAP_FORCE_DEVICEPTR:
9924 case GOMP_MAP_DEVICE_RESIDENT:
9925 case GOMP_MAP_LINK:
9926 case GOMP_MAP_FIRSTPRIVATE:
9927 case GOMP_MAP_FIRSTPRIVATE_INT:
9928 case GOMP_MAP_USE_DEVICE_PTR:
9929 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9930 return NULL_TREE;
9932 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9933 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9934 case GOMP_MAP_POINTER:
9935 case GOMP_MAP_ALWAYS_POINTER:
9936 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9937 /* These shouldn't appear by themselves. */
9938 if (!seen_error ())
9939 internal_error ("unexpected pointer mapping node");
9940 return error_mark_node;
9942 default:
9943 gcc_unreachable ();
9946 return error_mark_node;
9949 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9950 nodes by tree_operand_hash_no_se. */
9952 static void
9953 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9954 omp_mapping_group *> *grpmap,
9955 vec<omp_mapping_group> *groups,
9956 tree reindex_sentinel)
9958 omp_mapping_group *grp;
9959 unsigned int i;
9960 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9962 FOR_EACH_VEC_ELT (*groups, i, grp)
9964 if (reindexing && *grp->grp_start == reindex_sentinel)
9965 above_hwm = true;
9967 if (reindexing && !above_hwm)
9968 continue;
9970 if (grp->reprocess_struct)
9971 continue;
9973 tree fpp;
9974 unsigned int chained;
9975 tree node = omp_group_base (grp, &chained, &fpp);
9977 if (node == error_mark_node || (!node && !fpp))
9978 continue;
9980 for (unsigned j = 0;
9981 node && j < chained;
9982 node = OMP_CLAUSE_CHAIN (node), j++)
9984 tree decl = OMP_CLAUSE_DECL (node);
9985 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9986 meaning node-hash lookups don't work. This is a workaround for
9987 that, but ideally we should just create the INDIRECT_REF at
9988 source instead. FIXME. */
9989 if (TREE_CODE (decl) == MEM_REF
9990 && integer_zerop (TREE_OPERAND (decl, 1)))
9991 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9993 omp_mapping_group **prev = grpmap->get (decl);
9995 if (prev && *prev == grp)
9996 /* Empty. */;
9997 else if (prev)
9999 /* Mapping the same thing twice is normally diagnosed as an error,
10000 but can happen under some circumstances, e.g. in pr99928-16.c,
10001 the directive:
10003 #pragma omp target simd reduction(+:a[:3]) \
10004 map(always, tofrom: a[:6])
10007 will result in two "a[0]" mappings (of different sizes). */
10009 grp->sibling = (*prev)->sibling;
10010 (*prev)->sibling = grp;
10012 else
10013 grpmap->put (decl, grp);
10016 if (!fpp)
10017 continue;
10019 omp_mapping_group **prev = grpmap->get (fpp);
10020 if (prev && *prev != grp)
10022 grp->sibling = (*prev)->sibling;
10023 (*prev)->sibling = grp;
10025 else
10026 grpmap->put (fpp, grp);
10030 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
10031 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
10033 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
10034 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
10036 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
10038 return grpmap;
10041 /* Rebuild group map from partially-processed clause list (during
10042 omp_build_struct_sibling_lists). We have already processed nodes up until
10043 a high-water mark (HWM). This is a bit tricky because the list is being
10044 reordered as it is scanned, but we know:
10046 1. The list after HWM has not been touched yet, so we can reindex it safely.
10048 2. The list before and including HWM has been altered, but remains
10049 well-formed throughout the sibling-list building operation.
10051 so, we can do the reindex operation in two parts, on the processed and
10052 then the unprocessed halves of the list. */
10054 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
10055 omp_reindex_mapping_groups (tree *list_p,
10056 vec<omp_mapping_group> *groups,
10057 vec<omp_mapping_group> *processed_groups,
10058 tree sentinel)
10060 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
10061 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
10063 processed_groups->truncate (0);
10065 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
10066 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
10067 if (sentinel)
10068 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
10070 return grpmap;
10073 /* Find the immediately-containing struct for a component ref (etc.)
10074 expression EXPR. */
10076 static tree
10077 omp_containing_struct (tree expr)
10079 tree expr0 = expr;
10081 STRIP_NOPS (expr);
10083 /* Note: don't strip NOPs unless we're also stripping off array refs or a
10084 component ref. */
10085 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
10086 return expr0;
10088 while (TREE_CODE (expr) == ARRAY_REF)
10089 expr = TREE_OPERAND (expr, 0);
10091 if (TREE_CODE (expr) == COMPONENT_REF)
10092 expr = TREE_OPERAND (expr, 0);
10094 return expr;
10097 /* Return TRUE if DECL describes a component that is part of a whole structure
10098 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
10099 that maps that structure, if present. */
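/* E.g. if some group maps "s" as a whole and DECL is "s.a.b", walking up
   the containing structs finds the group mapping "s" and returns it in
   *MAPPED_BY_GROUP.  (Illustration only.)  */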
10101 static bool
10102 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
10103 omp_mapping_group *> *grpmap,
10104 tree decl,
10105 omp_mapping_group **mapped_by_group)
10107 tree wsdecl = NULL_TREE;
10109 *mapped_by_group = NULL;
10111 while (true)
10113 wsdecl = omp_containing_struct (decl);
10114 if (wsdecl == decl)
10115 break;
10116 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
10117 if (!wholestruct
10118 && TREE_CODE (wsdecl) == MEM_REF
10119 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
10121 tree deref = TREE_OPERAND (wsdecl, 0);
10122 deref = build_fold_indirect_ref (deref);
10123 wholestruct = grpmap->get (deref);
10125 if (wholestruct)
10127 *mapped_by_group = *wholestruct;
10128 return true;
10130 decl = wsdecl;
10133 return false;
10136 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
10137 FALSE on error. */
10139 static bool
10140 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
10141 vec<omp_mapping_group> *groups,
10142 hash_map<tree_operand_hash_no_se,
10143 omp_mapping_group *> *grpmap,
10144 omp_mapping_group *grp)
10146 if (grp->mark == PERMANENT)
10147 return true;
10148 if (grp->mark == TEMPORARY)
10150 fprintf (stderr, "when processing group:\n");
10151 debug_mapping_group (grp);
10152 internal_error ("base pointer cycle detected");
10153 return false;
10155 grp->mark = TEMPORARY;
10157 tree attaches_to = omp_get_attachment (grp);
10159 if (attaches_to)
10161 omp_mapping_group **basep = grpmap->get (attaches_to);
10163 if (basep && *basep != grp)
10165 for (omp_mapping_group *w = *basep; w; w = w->sibling)
10166 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
10167 return false;
10171 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
10173 while (decl)
10175 tree base = omp_get_base_pointer (decl);
10177 if (!base)
10178 break;
10180 omp_mapping_group **innerp = grpmap->get (base);
10181 omp_mapping_group *wholestruct;
10183 /* We should treat whole-structure mappings as if all (pointer, in this
10184 case) members are mapped as individual list items. Check if we have
10185 such a whole-structure mapping, if we don't have an explicit reference
10186 to the pointer member itself. */
10187 if (!innerp
10188 && TREE_CODE (base) == COMPONENT_REF
10189 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
10190 innerp = &wholestruct;
10192 if (innerp && *innerp != grp)
10194 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
10195 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
10196 return false;
10197 break;
10200 decl = base;
10203 grp->mark = PERMANENT;
10205 /* Emit grp to output list. */
10207 **outlist = grp;
10208 *outlist = &grp->next;
10210 return true;
10213 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
10214 before mappings that use those pointers. This is an implementation of the
10215 depth-first search algorithm, described e.g. at:
10217 https://en.wikipedia.org/wiki/Topological_sorting */
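/* For example, with "map(tofrom: p) map(tofrom: p[0:10])" the section
   "p[0:10]" attaches to the base pointer "p", so the group mapping "p" is
   ordered before the one mapping "p[0:10]" regardless of the order the
   clauses were written in.  (Illustration of the ordering constraint.)  */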
10220 static omp_mapping_group *
10221 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
10222 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
10223 *grpmap,
10224 bool enter_exit_data)
10226 omp_mapping_group *grp, *outlist = NULL, **cursor;
10227 unsigned int i;
10228 bool saw_runtime_implicit = false;
10230 cursor = &outlist;
10232 FOR_EACH_VEC_ELT (*groups, i, grp)
10234 if (grp->mark != PERMANENT)
10236 if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
10238 saw_runtime_implicit = true;
10239 continue;
10241 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
10242 return NULL;
10246 if (!saw_runtime_implicit)
10247 return outlist;
10249 FOR_EACH_VEC_ELT (*groups, i, grp)
10251 if (grp->mark != PERMANENT
10252 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
10254 /* Clear the flag for enter/exit data because it is currently
10255 meaningless for those operations in libgomp. */
10256 if (enter_exit_data)
10257 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start) = 0;
10259 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
10260 return NULL;
10264 return outlist;
10267 /* Split INLIST into three parts:
10269 - "present" alloc/to/from groups
10270 - other to/from groups
10271 - other alloc/release/delete groups
10273 These sub-lists are then concatenated together to form the final list.
10274 Each sub-list retains the order of the original list.
10275 Note that ATTACH nodes are later moved to the end of the list in
10276 gimplify_adjust_omp_clauses, for target regions. */
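/* E.g. an input ordering { alloc: a } { present, to: b } { tofrom: c }
   becomes { present, to: b } { tofrom: c } { alloc: a }: "present" groups
   first, then other data movement, then allocation/deallocation groups.
   (A sketch of the segregation rule described above.)  */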
10278 static omp_mapping_group *
10279 omp_segregate_mapping_groups (omp_mapping_group *inlist)
10281 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
10282 omp_mapping_group *p_groups = NULL;
10283 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
10284 omp_mapping_group **p_tail = &p_groups;
10286 for (omp_mapping_group *w = inlist; w;)
10288 tree c = *w->grp_start;
10289 omp_mapping_group *next = w->next;
10291 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
10293 switch (OMP_CLAUSE_MAP_KIND (c))
10295 case GOMP_MAP_ALLOC:
10296 case GOMP_MAP_RELEASE:
10297 case GOMP_MAP_DELETE:
10298 *ard_tail = w;
10299 w->next = NULL;
10300 ard_tail = &w->next;
10301 break;
10303 /* These map types are all semantically identical, so are moved into a
10304 single group. They will each be changed into GOMP_MAP_FORCE_PRESENT
10305 in gimplify_adjust_omp_clauses. */
10306 case GOMP_MAP_PRESENT_ALLOC:
10307 case GOMP_MAP_PRESENT_FROM:
10308 case GOMP_MAP_PRESENT_TO:
10309 case GOMP_MAP_PRESENT_TOFROM:
10310 *p_tail = w;
10311 w->next = NULL;
10312 p_tail = &w->next;
10313 break;
10315 default:
10316 *tf_tail = w;
10317 w->next = NULL;
10318 tf_tail = &w->next;
10321 w = next;
10324 /* Now splice the lists together... */
10325 *tf_tail = ard_groups;
10326 *p_tail = tf_groups;
10328 return p_groups;
10331 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10332 those groups based on the output list of omp_tsort_mapping_groups --
10333 singly-linked, threaded through each element's NEXT pointer starting at
10334 HEAD. Each list element appears exactly once in that linked list.
10336 Each element of GROUPS may correspond to one or several mapping nodes.
10337 Node groups are kept together, and in the reordered list, the positions of
10338 the original groups are reused for the positions of the reordered list.
10339 Hence if we have e.g.
10341 {to ptr ptr} firstprivate {tofrom ptr} ...
10342      ^            ^            ^
10343  first group   non-"map"  second group
10345 and say the second group contains a base pointer for the first so must be
10346 moved before it, the resulting list will contain:
10348 {tofrom ptr} firstprivate {to ptr ptr} ...
10349      ^ prev. second group      ^ prev. first group  */
10352 static tree *
10353 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
10354 omp_mapping_group *head,
10355 tree *list_p)
10357 omp_mapping_group *grp;
10358 unsigned int i;
10359 unsigned numgroups = groups->length ();
10360 auto_vec<tree> old_heads (numgroups);
10361 auto_vec<tree *> old_headps (numgroups);
10362 auto_vec<tree> new_heads (numgroups);
10363 auto_vec<tree> old_succs (numgroups);
10364 bool map_at_start = (list_p == (*groups)[0].grp_start);
10366 tree *new_grp_tail = NULL;
10368 /* Stash the start & end nodes of each mapping group before we start
10369 modifying the list. */
10370 FOR_EACH_VEC_ELT (*groups, i, grp)
10372 old_headps.quick_push (grp->grp_start);
10373 old_heads.quick_push (*grp->grp_start);
10374 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
10377 /* And similarly, the heads of the groups in the order we want to rearrange
10378 the list to. */
10379 for (omp_mapping_group *w = head; w; w = w->next)
10380 new_heads.quick_push (*w->grp_start);
10382 FOR_EACH_VEC_ELT (*groups, i, grp)
10384 gcc_assert (head);
10386 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
10388 /* a {b c d} {e f g} h i j (original)
10390 a {k l m} {e f g} h i j (inserted new group on last iter)
10392 a {k l m} {n o p} h i j (this time, chain last group to new one)
10393 ^new_grp_tail
10395 *new_grp_tail = new_heads[i];
10397 else if (new_grp_tail)
10399 /* a {b c d} e {f g h} i j k (original)
10401 a {l m n} e {f g h} i j k (gap after last iter's group)
10403 a {l m n} e {o p q} h i j (chain last group to old successor)
10404 ^new_grp_tail
10406 *new_grp_tail = old_succs[i - 1];
10407 *old_headps[i] = new_heads[i];
10409 else
10411 /* The first inserted group -- point to new group, and leave end
10412 open.
10413 a {b c d} e f
10415 a {g h i...
10417 *grp->grp_start = new_heads[i];
10420 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
10422 head = head->next;
10425 if (new_grp_tail)
10426 *new_grp_tail = old_succs[numgroups - 1];
10428 gcc_assert (!head);
10430 return map_at_start ? (*groups)[0].grp_start : list_p;
10433 /* DECL is supposed to have lastprivate semantics in the outer contexts
10434 of combined/composite constructs, starting with OCTX.
10435 Add needed lastprivate, shared or map clause if no data sharing or
10436 mapping clause are present. IMPLICIT_P is true if it is an implicit
10437 clause (IV on simd), in which case the lastprivate will not be
10438 copied to some constructs. */
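/* E.g. for the combined construct

     #pragma omp target parallel for simd

   where the loop iteration variable is implicitly lastprivate on the simd
   construct, this adds GOVD_SHARED on the combined parallel context and
   GOVD_MAP on the combined target context.  (A sketch of one common case;
   the loop below handles several region kinds.)  */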
10440 static void
10441 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
10442 tree decl, bool implicit_p)
10444 struct gimplify_omp_ctx *orig_octx = octx;
10445 for (; octx; octx = octx->outer_context)
10447 if ((octx->region_type == ORT_COMBINED_PARALLEL
10448 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
10449 && splay_tree_lookup (octx->variables,
10450 (splay_tree_key) decl) == NULL)
10452 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
10453 continue;
10455 if ((octx->region_type & ORT_TASK) != 0
10456 && octx->combined_loop
10457 && splay_tree_lookup (octx->variables,
10458 (splay_tree_key) decl) == NULL)
10460 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10461 continue;
10463 if (implicit_p
10464 && octx->region_type == ORT_WORKSHARE
10465 && octx->combined_loop
10466 && splay_tree_lookup (octx->variables,
10467 (splay_tree_key) decl) == NULL
10468 && octx->outer_context
10469 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
10470 && splay_tree_lookup (octx->outer_context->variables,
10471 (splay_tree_key) decl) == NULL)
10473 octx = octx->outer_context;
10474 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10475 continue;
10477 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
10478 && octx->combined_loop
10479 && splay_tree_lookup (octx->variables,
10480 (splay_tree_key) decl) == NULL
10481 && !omp_check_private (octx, decl, false))
10483 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10484 continue;
10486 if (octx->region_type == ORT_COMBINED_TARGET)
10488 splay_tree_node n = splay_tree_lookup (octx->variables,
10489 (splay_tree_key) decl);
10490 if (n == NULL)
10492 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10493 octx = octx->outer_context;
10495 else if (!implicit_p
10496 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
10498 n->value &= ~(GOVD_FIRSTPRIVATE
10499 | GOVD_FIRSTPRIVATE_IMPLICIT
10500 | GOVD_EXPLICIT);
10501 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10502 octx = octx->outer_context;
10505 break;
10507 if (octx && (implicit_p || octx != orig_octx))
10508 omp_notice_variable (octx, decl, true);
10511 /* We might have indexed several groups for DECL, e.g. a "TO" mapping and also
10512 a "FIRSTPRIVATE" mapping. Return the one that isn't firstprivate, etc. */
10514 static omp_mapping_group *
10515 omp_get_nonfirstprivate_group (hash_map<tree_operand_hash_no_se,
10516 omp_mapping_group *> *grpmap,
10517 tree decl, bool allow_deleted = false)
10519 omp_mapping_group **to_group_p = grpmap->get (decl);
10521 if (!to_group_p)
10522 return NULL;
10524 omp_mapping_group *to_group = *to_group_p;
10526 for (; to_group; to_group = to_group->sibling)
10528 tree grp_end = to_group->grp_end;
10529 switch (OMP_CLAUSE_MAP_KIND (grp_end))
10531 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10532 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10533 break;
10535 default:
10536 if (allow_deleted || !to_group->deleted)
10537 return to_group;
10541 return NULL;
10544 /* Return TRUE if the directive (whose clauses are described by the hash table
10545 of mapping groups, GRPMAP) maps DECL explicitly. If TO_SPECIFICALLY is
10546 true, only count TO mappings. If ALLOW_DELETED is true, ignore the
10547 "deleted" flag for groups. If CONTAINED_IN_STRUCT is true, also return
10548 TRUE if DECL is mapped as a member of a whole-struct mapping. */
10550 static bool
10551 omp_directive_maps_explicitly (hash_map<tree_operand_hash_no_se,
10552 omp_mapping_group *> *grpmap,
10553 tree decl, omp_mapping_group **base_group,
10554 bool to_specifically, bool allow_deleted,
10555 bool contained_in_struct)
10557 omp_mapping_group *decl_group
10558 = omp_get_nonfirstprivate_group (grpmap, decl, allow_deleted);
10560 *base_group = NULL;
10562 if (decl_group)
10564 tree grp_first = *decl_group->grp_start;
10565 /* We might be called during omp_build_struct_sibling_lists, when
10566 GOMP_MAP_STRUCT might have been inserted at the start of the group.
10567 Skip over that, and also possibly the node after it. */
10568 if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
10569 || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
10571 grp_first = OMP_CLAUSE_CHAIN (grp_first);
10572 if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
10573 || (OMP_CLAUSE_MAP_KIND (grp_first)
10574 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10575 || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_ATTACH_DETACH)
10576 grp_first = OMP_CLAUSE_CHAIN (grp_first);
10578 enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
10579 if (!to_specifically
10580 || GOMP_MAP_COPY_TO_P (first_kind)
10581 || first_kind == GOMP_MAP_ALLOC)
10583 *base_group = decl_group;
10584 return true;
10588 if (contained_in_struct
10589 && omp_mapped_by_containing_struct (grpmap, decl, base_group))
10590 return true;
10592 return false;
10595 /* If we have mappings INNER and OUTER, where INNER is a component access and
10596 OUTER is a mapping of the whole containing struct, check that the mappings
10597 are compatible. We'll be deleting the inner mapping, so we need to make
10598 sure the outer mapping does (at least) the same transfers to/from the device
10599 as the inner mapping. */
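/* E.g. an outer "map(tofrom: s)" subsumes an inner "map(to: s.a)", since
   TOFROM performs at least the transfers TO does; an outer "map(to: s)"
   does not subsume an inner "map(from: s.a)", and that case is diagnosed.
   (Illustration only; the switch below lists the accepted pairs.)  */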
10601 bool
10602 omp_check_mapping_compatibility (location_t loc,
10603 omp_mapping_group *outer,
10604 omp_mapping_group *inner)
10606 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
10608 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
10609 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
10611 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
10612 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
10614 if (outer_kind == inner_kind)
10615 return true;
10617 switch (outer_kind)
10619 case GOMP_MAP_ALWAYS_TO:
10620 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10621 || inner_kind == GOMP_MAP_ALLOC
10622 || inner_kind == GOMP_MAP_TO)
10623 return true;
10624 break;
10626 case GOMP_MAP_ALWAYS_FROM:
10627 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10628 || inner_kind == GOMP_MAP_RELEASE
10629 || inner_kind == GOMP_MAP_FROM)
10630 return true;
10631 break;
10633 case GOMP_MAP_TO:
10634 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10635 || inner_kind == GOMP_MAP_ALLOC)
10636 return true;
10637 break;
10639 case GOMP_MAP_FROM:
10640 if (inner_kind == GOMP_MAP_RELEASE
10641 || inner_kind == GOMP_MAP_FORCE_PRESENT)
10642 return true;
10643 break;
10645 case GOMP_MAP_ALWAYS_TOFROM:
10646 case GOMP_MAP_TOFROM:
10647 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10648 || inner_kind == GOMP_MAP_ALLOC
10649 || inner_kind == GOMP_MAP_TO
10650 || inner_kind == GOMP_MAP_FROM
10651 || inner_kind == GOMP_MAP_TOFROM)
10652 return true;
10653 break;
10655 default:
10659 error_at (loc, "data movement for component %qE is not compatible with "
10660 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10661 OMP_CLAUSE_DECL (first_outer));
10663 return false;
10666 /* This function handles several cases where clauses on a mapping directive
10667 can interact with each other.
10669 If we have a FIRSTPRIVATE_POINTER node and we're also mapping the pointer
10670 on the same directive, change the mapping of the first node to
10671 ATTACH_DETACH. We should have detected that this will happen already in
10672 c-omp.cc:c_omp_adjust_map_clauses and marked the appropriate decl
10673 as addressable. (If we didn't, bail out.)
10675 If we have a FIRSTPRIVATE_REFERENCE (for a reference to pointer) and we're
10676 mapping the base pointer also, we may need to change the mapping type to
10677 ATTACH_DETACH and synthesize an alloc node for the reference itself.
10679 If we have an ATTACH_DETACH node, this is an array section with a pointer
10680 base. If we're mapping the base on the same directive too, we can drop its
10681 mapping. However, if we have a reference to pointer, make other appropriate
10682 adjustments to the mapping nodes instead.
10684 If we have an ATTACH_DETACH node with a Fortran pointer-set (array
10685 descriptor) mapping for a derived-type component, and we're also mapping the
10686 whole of the derived-type variable on another clause, the pointer-set
10687 mapping is removed.
10689 If we have a component access but we're also mapping the whole of the
10690 containing struct, drop the former access.
10692 If the expression is a component access, and we're also mapping a base
10693 pointer used in that component access in the same expression, change the
10694 mapping type of the latter to ALLOC (ready for processing by
10695 omp_build_struct_sibling_lists). */
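/* A sketch of the first case above, assuming the C front end's usual clause
   expansion: for

     #pragma omp target map(to: p) map(to: p[0:n])

   the group for the array section ends in a GOMP_MAP_FIRSTPRIVATE_POINTER
   node for "p"; because "p" is also mapped explicitly, that node is changed
   to GOMP_MAP_ATTACH_DETACH so the device copy of "p" gets attached to the
   mapped section.  */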
10697 void
10698 omp_resolve_clause_dependencies (enum tree_code code,
10699 vec<omp_mapping_group> *groups,
10700 hash_map<tree_operand_hash_no_se,
10701 omp_mapping_group *> *grpmap)
10703 int i;
10704 omp_mapping_group *grp;
10705 bool repair_chain = false;
10707 FOR_EACH_VEC_ELT (*groups, i, grp)
10709 tree grp_end = grp->grp_end;
10710 tree decl = OMP_CLAUSE_DECL (grp_end);
10712 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10714 switch (OMP_CLAUSE_MAP_KIND (grp_end))
10716 case GOMP_MAP_FIRSTPRIVATE_POINTER:
10718 omp_mapping_group *to_group
10719 = omp_get_nonfirstprivate_group (grpmap, decl);
10721 if (!to_group || to_group == grp)
10722 continue;
10724 tree grp_first = *to_group->grp_start;
10725 enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
10727 if ((GOMP_MAP_COPY_TO_P (first_kind)
10728 || first_kind == GOMP_MAP_ALLOC)
10729 && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
10730 != GOMP_MAP_FIRSTPRIVATE_POINTER))
10732 gcc_assert (TREE_ADDRESSABLE (OMP_CLAUSE_DECL (grp_end)));
10733 OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
10736 break;
10738 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
10740 tree ptr = build_fold_indirect_ref (decl);
10742 omp_mapping_group *to_group
10743 = omp_get_nonfirstprivate_group (grpmap, ptr);
10745 if (!to_group || to_group == grp)
10746 continue;
10748 tree grp_first = *to_group->grp_start;
10749 enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
10751 if (GOMP_MAP_COPY_TO_P (first_kind)
10752 || first_kind == GOMP_MAP_ALLOC)
10754 OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
10755 OMP_CLAUSE_DECL (grp_end) = ptr;
10756 if ((OMP_CLAUSE_CHAIN (*to_group->grp_start)
10757 == to_group->grp_end)
10758 && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
10759 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10761 gcc_assert (TREE_ADDRESSABLE
10762 (OMP_CLAUSE_DECL (to_group->grp_end)));
10763 OMP_CLAUSE_SET_MAP_KIND (to_group->grp_end,
10764 GOMP_MAP_ATTACH_DETACH);
10766 location_t loc = OMP_CLAUSE_LOCATION (to_group->grp_end);
10767 tree alloc
10768 = build_omp_clause (loc, OMP_CLAUSE_MAP);
10769 OMP_CLAUSE_SET_MAP_KIND (alloc, GOMP_MAP_ALLOC);
10770 tree tmp = build_fold_addr_expr (OMP_CLAUSE_DECL
10771 (to_group->grp_end));
10772 tree char_ptr_type = build_pointer_type (char_type_node);
10773 OMP_CLAUSE_DECL (alloc)
10774 = build2 (MEM_REF, char_type_node,
10775 tmp,
10776 build_int_cst (char_ptr_type, 0));
10777 OMP_CLAUSE_SIZE (alloc) = TYPE_SIZE_UNIT (TREE_TYPE (tmp));
10779 OMP_CLAUSE_CHAIN (alloc)
10780 = OMP_CLAUSE_CHAIN (*to_group->grp_start);
10781 OMP_CLAUSE_CHAIN (*to_group->grp_start) = alloc;
10785 break;
10787 case GOMP_MAP_ATTACH_DETACH:
10788 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
10790 tree base_ptr, referenced_ptr_node = NULL_TREE;
10792 while (TREE_CODE (decl) == ARRAY_REF)
10793 decl = TREE_OPERAND (decl, 0);
10795 if (TREE_CODE (decl) == INDIRECT_REF)
10796 decl = TREE_OPERAND (decl, 0);
10798 /* Only component accesses. */
10799 if (DECL_P (decl))
10800 continue;
10802 /* We want the pointer itself when checking if the base pointer is
10803 mapped elsewhere in the same directive -- if we have a
10804 reference to the pointer, don't use that. */
10806 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10807 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10809 referenced_ptr_node = OMP_CLAUSE_CHAIN (*grp->grp_start);
10810 base_ptr = OMP_CLAUSE_DECL (referenced_ptr_node);
10812 else
10813 base_ptr = decl;
10815 gomp_map_kind zlas_kind
10816 = (code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
10817 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;
10819 if (TREE_CODE (TREE_TYPE (base_ptr)) == POINTER_TYPE)
10821 /* If we map the base TO, and we're doing an attachment, we can
10822 skip the TO mapping altogether and create an ALLOC mapping
10823 instead, since the attachment will overwrite the device
10824 pointer in that location immediately anyway. Otherwise,
10825 change our mapping to
10826 GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION in case the
10827 attachment target has not been copied to the device already
10828 by some earlier directive. */
10830 bool base_mapped_to = false;
10832 omp_mapping_group *base_group;
10834 if (omp_directive_maps_explicitly (grpmap, base_ptr,
10835 &base_group, false, true,
10836 false))
10838 if (referenced_ptr_node)
10840 base_mapped_to = true;
10841 if ((OMP_CLAUSE_MAP_KIND (base_group->grp_end)
10842 == GOMP_MAP_ATTACH_DETACH)
10843 && (OMP_CLAUSE_CHAIN (*base_group->grp_start)
10844 == base_group->grp_end))
10846 OMP_CLAUSE_CHAIN (*base_group->grp_start)
10847 = OMP_CLAUSE_CHAIN (base_group->grp_end);
10848 base_group->grp_end = *base_group->grp_start;
10849 repair_chain = true;
10852 else
10854 base_group->deleted = true;
10855 OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end) = 1;
10859 /* We're dealing with a reference to a pointer, and we are
10860 attaching both the reference and the pointer. We know the
10861 reference itself is on the target, because we are going to
10862 create an ALLOC node for it in accumulate_sibling_list. The
10863 pointer might be on the target already or it might not, but
10864 if it isn't then it's not an error, so use
10865 GOMP_MAP_ATTACH_ZLAS for it. */
10866 if (!base_mapped_to && referenced_ptr_node)
10867 OMP_CLAUSE_SET_MAP_KIND (referenced_ptr_node, zlas_kind);
10869 omp_mapping_group *struct_group;
10870 tree desc;
10871 if ((desc = OMP_CLAUSE_CHAIN (*grp->grp_start))
10872 && omp_map_clause_descriptor_p (desc)
10873 && omp_mapped_by_containing_struct (grpmap, decl,
10874 &struct_group))
10875 /* If we have a pointer set but we're mapping (or unmapping)
10876 the whole of the containing struct, we can remove the
10877 pointer set mapping. */
10878 OMP_CLAUSE_CHAIN (*grp->grp_start) = OMP_CLAUSE_CHAIN (desc);
10880 else if (TREE_CODE (TREE_TYPE (base_ptr)) == REFERENCE_TYPE
10881 && (TREE_CODE (TREE_TYPE (TREE_TYPE (base_ptr)))
10882 == ARRAY_TYPE)
10883 && OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION
10884 (*grp->grp_start))
10885 OMP_CLAUSE_SET_MAP_KIND (grp->grp_end, zlas_kind);
10887 break;
10889 case GOMP_MAP_ATTACH:
10890 /* Ignore standalone attach here. */
10891 break;
10893 default:
10895 omp_mapping_group *struct_group;
10896 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10897 && *grp->grp_start == grp_end)
10899 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10900 struct_group, grp);
10901 /* Remove the whole of this mapping -- redundant. */
10902 grp->deleted = true;
10905 tree base = decl;
10906 while ((base = omp_get_base_pointer (base)))
10908 omp_mapping_group *base_group;
10910 if (omp_directive_maps_explicitly (grpmap, base, &base_group,
10911 true, true, false))
10913 tree grp_first = *base_group->grp_start;
10914 OMP_CLAUSE_SET_MAP_KIND (grp_first, GOMP_MAP_ALLOC);
10921 if (repair_chain)
10923 /* Group start pointers may have become detached from the
10924 OMP_CLAUSE_CHAIN of previous groups if elements were removed from the
10925 end of those groups. Fix that now. */
10926 tree *new_next = NULL;
10927 FOR_EACH_VEC_ELT (*groups, i, grp)
10929 if (new_next)
10930 grp->grp_start = new_next;
10932 new_next = &OMP_CLAUSE_CHAIN (grp->grp_end);
10937 /* Similar to omp_resolve_clause_dependencies, but for OpenACC.  The only
10938 clause dependencies we handle for now are struct element mappings
10939 occurring together with whole-struct mappings on the same directive; we
10940 also perform duplicate clause detection. */
10942 void
10943 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10944 hash_map<tree_operand_hash_no_se,
10945 omp_mapping_group *> *grpmap)
10947 int i;
10948 omp_mapping_group *grp;
10949 hash_set<tree_operand_hash> *seen_components = NULL;
10950 hash_set<tree_operand_hash> *shown_error = NULL;
10952 FOR_EACH_VEC_ELT (*groups, i, grp)
10954 tree grp_end = grp->grp_end;
10955 tree decl = OMP_CLAUSE_DECL (grp_end);
10957 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10959 if (DECL_P (grp_end))
10960 continue;
10962 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10963 while (TREE_CODE (c) == ARRAY_REF)
10964 c = TREE_OPERAND (c, 0);
10965 if (TREE_CODE (c) != COMPONENT_REF)
10966 continue;
10967 if (!seen_components)
10968 seen_components = new hash_set<tree_operand_hash> ();
10969 if (!shown_error)
10970 shown_error = new hash_set<tree_operand_hash> ();
10971 if (seen_components->contains (c)
10972 && !shown_error->contains (c))
10974 error_at (OMP_CLAUSE_LOCATION (grp_end),
10975 "%qE appears more than once in map clauses",
10976 OMP_CLAUSE_DECL (grp_end));
10977 shown_error->add (c);
10979 else
10980 seen_components->add (c);
10982 omp_mapping_group *struct_group;
10983 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10984 && *grp->grp_start == grp_end)
10986 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10987 struct_group, grp);
10988 /* Remove the whole of this mapping -- redundant. */
10989 grp->deleted = true;
10993 if (seen_components)
10994 delete seen_components;
10995 if (shown_error)
10996 delete shown_error;
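/* E.g. (an illustrative directive): for
     #pragma acc parallel copy(s.a[0:n]) copy(s.a[0:n])
   the second clause triggers the "appears more than once in map
   clauses" diagnostic above.  */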
10999 /* Link node NEWNODE so it is pointed to by chain INSERT_AT.  NEWNODE's chain
11000 is linked to the node previously pointed to by INSERT_AT. */
11002 static tree *
11003 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
11005 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
11006 *insert_at = newnode;
11007 return &OMP_CLAUSE_CHAIN (newnode);
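/* For illustration (hypothetical nodes): with the clause chain
     A -> B
   and INSERT_AT == &OMP_CLAUSE_CHAIN (A), inserting N gives
     A -> N -> B
   and returns &OMP_CLAUSE_CHAIN (N), i.e. the chain pointing at B.  */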
11010 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
11011 pointed to by chain MOVE_AFTER instead. */
11013 static void
11014 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
11016 gcc_assert (node == *old_pos);
11017 *old_pos = OMP_CLAUSE_CHAIN (node);
11018 OMP_CLAUSE_CHAIN (node) = *move_after;
11019 *move_after = node;
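/* For illustration (hypothetical nodes): moving B in
     A -> B -> C ... X -> Y
   with OLD_POS == &OMP_CLAUSE_CHAIN (A) and
   MOVE_AFTER == &OMP_CLAUSE_CHAIN (X) gives
     A -> C ... X -> B -> Y.  */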
11022 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
11023 through LAST_NODE to after the MOVE_AFTER chain.  Similar to the function
11024 below, but no new nodes are prepended before splicing into the new position.
11025 Return the position we should continue scanning the list at, or NULL to
11026 stay where we were. */
11028 static tree *
11029 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
11030 tree *move_after)
11032 if (first_ptr == move_after)
11033 return NULL;
11035 tree tmp = *first_ptr;
11036 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
11037 OMP_CLAUSE_CHAIN (last_node) = *move_after;
11038 *move_after = tmp;
11040 return first_ptr;
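/* For illustration (hypothetical nodes): moving the sublist [B, C] in
     A -> B -> C -> D ... X -> Y
   with FIRST_PTR == &OMP_CLAUSE_CHAIN (A) and
   MOVE_AFTER == &OMP_CLAUSE_CHAIN (X) gives
     A -> D ... X -> B -> C -> Y
   and returns FIRST_PTR, which now points at D.  */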
11043 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
11044 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
11045 pointer MOVE_AFTER.
11047 The latter list was previously part of the OMP clause list, and the former
11048 (prepended) part is composed of new nodes.
11050 We start with a list of nodes beginning with a struct mapping node.  We
11051 rearrange the list so that the new nodes starting from FIRST_NEW, whose last
11052 node's chain is LAST_NEW_TAIL, come directly after MOVE_AFTER, followed by
11053 the group of mapping nodes we are currently processing (from the chain
11054 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
11055 we should continue processing from, or NULL to stay where we were.
11057 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
11058 different) is worked through below. Here we are processing LAST_NODE, and
11059 FIRST_PTR points at the preceding mapping clause:
11061 #. mapping node chain
11062 ---------------------------------------------------
11063 A. struct_node [->B]
11064 B. comp_1 [->C]
11065 C. comp_2 [->D (move_after)]
11066 D. map_to_3 [->E]
11067 E. attach_3 [->F (first_ptr)]
11068 F. map_to_4 [->G (continue_at)]
11069 G. attach_4 (last_node) [->H]
11070 H. ...
11072 *last_new_tail = *first_ptr;
11074 I. new_node (first_new) [->F (last_new_tail)]
11076 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
11078 #. mapping node chain
11079 ----------------------------------------------------
11080 A. struct_node [->B]
11081 B. comp_1 [->C]
11082 C. comp_2 [->D (move_after)]
11083 D. map_to_3 [->E]
11084 E. attach_3 [->H (first_ptr)]
11085 F. map_to_4 [->G (continue_at)]
11086 G. attach_4 (last_node) [->H]
11087 H. ...
11089 I. new_node (first_new) [->F (last_new_tail)]
11091 OMP_CLAUSE_CHAIN (last_node) = *move_after;
11093 #. mapping node chain
11094 ---------------------------------------------------
11095 A. struct_node [->B]
11096 B. comp_1 [->C]
11097 C. comp_2 [->D (move_after)]
11098 D. map_to_3 [->E]
11099 E. attach_3 [->H (continue_at)]
11100 F. map_to_4 [->G]
11101 G. attach_4 (last_node) [->D]
11102 H. ...
11104 I. new_node (first_new) [->F (last_new_tail)]
11106 *move_after = first_new;
11108 #. mapping node chain
11109 ---------------------------------------------------
11110 A. struct_node [->B]
11111 B. comp_1 [->C]
11112 C. comp_2 [->I (move_after)]
11113 D. map_to_3 [->E]
11114 E. attach_3 [->H (continue_at)]
11115 F. map_to_4 [->G]
11116 G. attach_4 (last_node) [->D]
11117 H. ...
11118 I. new_node (first_new) [->F (last_new_tail)]
11120 or, in order:
11122 #. mapping node chain
11123 ---------------------------------------------------
11124 A. struct_node [->B]
11125 B. comp_1 [->C]
11126 C. comp_2 [->I (move_after)]
11127 I. new_node (first_new) [->F (last_new_tail)]
11128 F. map_to_4 [->G]
11129 G. attach_4 (last_node) [->D]
11130 D. map_to_3 [->E]
11131 E. attach_3 [->H (continue_at)]
11132 H. ...
11135 static tree *
11136 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
11137 tree *first_ptr, tree last_node,
11138 tree *move_after)
11140 tree *continue_at = NULL;
11141 *last_new_tail = *first_ptr;
11142 if (first_ptr == move_after)
11143 *move_after = first_new;
11144 else
11146 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
11147 continue_at = first_ptr;
11148 OMP_CLAUSE_CHAIN (last_node) = *move_after;
11149 *move_after = first_new;
11151 return continue_at;
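/* Return the first token of the trailing run of ACCESS_METHOD tokens in
   ADDR_TOKENS, or the last token if it is not an ACCESS_METHOD.  E.g.
   (illustrative) for the token sequence
     [STRUCTURE_BASE, ACCESS_METHOD, ACCESS_METHOD]
   the middle token is returned.  */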
11154 static omp_addr_token *
11155 omp_first_chained_access_token (vec<omp_addr_token *> &addr_tokens)
11157 using namespace omp_addr_tokenizer;
11158 int idx = addr_tokens.length () - 1;
11159 gcc_assert (idx >= 0);
11160 if (addr_tokens[idx]->type != ACCESS_METHOD)
11161 return addr_tokens[idx];
11162 while (idx > 0 && addr_tokens[idx - 1]->type == ACCESS_METHOD)
11163 idx--;
11164 return addr_tokens[idx];
11167 /* Mapping struct members causes an additional set of nodes to be created,
11168 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
11169 number of members being mapped, in order of ascending position (byte
11170 address or bit position).
11172 We scan through the list of mapping clauses, calling this function for each
11173 struct member mapping we find, and build up the list of mappings after the
11174 initial GOMP_MAP_STRUCT node. For pointer members, these will be
11175 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
11176 moved into place in the sorted list.
11178 struct {
11179 int *a;
11180 int *b;
11181 int c;
11182 int *d;
11185 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
11186 struct.d[0:n])
11188 GOMP_MAP_STRUCT (4)
11189 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
11190 GOMP_MAP_ALLOC (struct.a)
11191 GOMP_MAP_ALLOC (struct.b)
11192 GOMP_MAP_TO (struct.c)
11193 GOMP_MAP_ALLOC (struct.d)
11196 In the case where we are mapping references to pointers, or in Fortran if
11197 we are mapping an array with a descriptor, additional nodes may also be
11198 created after the struct node list.
11200 The return value is either a pointer to the next node to process (if the
11201 list has been rearranged), or NULL to continue with the next node in the
11202 original list. */
11204 static tree *
11205 omp_accumulate_sibling_list (enum omp_region_type region_type,
11206 enum tree_code code,
11207 hash_map<tree_operand_hash, tree>
11208 *&struct_map_to_clause,
11209 hash_map<tree_operand_hash_no_se,
11210 omp_mapping_group *> *group_map,
11211 tree *grp_start_p, tree grp_end,
11212 vec<omp_addr_token *> &addr_tokens, tree **inner,
11213 bool *fragile_p, bool reprocessing_struct,
11214 tree **added_tail)
11216 using namespace omp_addr_tokenizer;
11217 poly_offset_int coffset;
11218 poly_int64 cbitpos;
11219 tree ocd = OMP_CLAUSE_DECL (grp_end);
11220 bool openmp = !(region_type & ORT_ACC);
11221 bool target = (region_type & ORT_TARGET) != 0;
11222 tree *continue_at = NULL;
11224 while (TREE_CODE (ocd) == ARRAY_REF)
11225 ocd = TREE_OPERAND (ocd, 0);
11227 if (*fragile_p)
11229 omp_mapping_group *to_group
11230 = omp_get_nonfirstprivate_group (group_map, ocd, true);
11232 if (to_group)
11233 return NULL;
11236 omp_addr_token *last_token = omp_first_chained_access_token (addr_tokens);
11237 if (last_token->type == ACCESS_METHOD)
11239 switch (last_token->u.access_kind)
11241 case ACCESS_REF:
11242 case ACCESS_REF_TO_POINTER:
11243 case ACCESS_REF_TO_POINTER_OFFSET:
11244 case ACCESS_INDEXED_REF_TO_ARRAY:
11245 /* We may see either a bare reference or a dereferenced
11246 "convert_from_reference"-like one here.  Handle both forms. */
11247 if (TREE_CODE (ocd) == INDIRECT_REF)
11248 ocd = TREE_OPERAND (ocd, 0);
11249 gcc_assert (TREE_CODE (TREE_TYPE (ocd)) == REFERENCE_TYPE);
11250 break;
11252 default:
11257 bool variable_offset;
11258 tree base
11259 = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);
11261 int base_token;
11262 for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
11264 if (addr_tokens[base_token]->type == ARRAY_BASE
11265 || addr_tokens[base_token]->type == STRUCTURE_BASE)
11266 break;
11269 /* The two expressions in the assertion below aren't quite the same: if we
11270 have 'struct_base_decl access_indexed_array' for something like
11271 "myvar[2].x" then base will be "myvar" and addr_tokens[base_token]->expr
11272 will be "myvar[2]" -- the actual base of the structure.
11273 The former interpretation leads to a strange situation where we get
11274 struct(myvar) alloc(myvar[2].ptr1)
11275 That is, the array of structures is kind of treated as one big structure
11276 for the purposes of gathering sibling lists, etc. */
11277 /* gcc_assert (base == addr_tokens[base_token]->expr); */
11279 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
11280 == GOMP_MAP_ATTACH_DETACH)
11281 || (OMP_CLAUSE_MAP_KIND (grp_end)
11282 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
11283 bool has_descriptor = false;
11284 if (OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
11286 tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
11287 if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
11288 has_descriptor = true;
11291 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
11293 enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;
11295 if (struct_map_to_clause == NULL)
11296 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
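/* A variable offset arises for an access such as "s.arr[i]" with
   non-constant I (an illustrative case): the member mappings cannot
   then be sorted by offset at compile time, so the unordered struct
   kind is used below.  */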
11298 if (variable_offset)
11299 str_kind = GOMP_MAP_STRUCT_UNORD;
11301 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
11303 OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
11304 OMP_CLAUSE_DECL (l) = unshare_expr (base);
11305 OMP_CLAUSE_SIZE (l) = size_int (1);
11307 struct_map_to_clause->put (base, l);
11309 /* On first iterating through the clause list, we insert the struct node
11310 just before the component access node that triggers the initial
11311 omp_accumulate_sibling_list call for a particular sibling list (and
11312 it then forms the first entry in that list). When reprocessing
11313 struct bases that are themselves component accesses, we insert the
11314 struct node on an off-side list to avoid inserting the new
11315 GOMP_MAP_STRUCT into the middle of the old one. */
11316 tree *insert_node_pos = reprocessing_struct ? *added_tail : grp_start_p;
11318 if (has_descriptor)
11320 tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
11321 if (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
11322 OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
11323 tree sc = *insert_node_pos;
11324 OMP_CLAUSE_CHAIN (l) = desc;
11325 OMP_CLAUSE_CHAIN (*grp_start_p) = OMP_CLAUSE_CHAIN (desc);
11326 OMP_CLAUSE_CHAIN (desc) = sc;
11327 *insert_node_pos = l;
11329 else if (attach_detach)
11331 tree extra_node;
11332 tree alloc_node
11333 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
11334 &extra_node);
11335 tree *tail;
11336 OMP_CLAUSE_CHAIN (l) = alloc_node;
11338 if (extra_node)
11340 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
11341 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
11342 tail = &OMP_CLAUSE_CHAIN (extra_node);
11344 else
11346 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
11347 tail = &OMP_CLAUSE_CHAIN (alloc_node);
11350 /* For OpenMP semantics, we don't want to implicitly allocate
11351 space for the pointer here for non-compute regions (e.g. "enter
11352 data"). A FRAGILE_P node is only being created so that
11353 omp-low.cc is able to rewrite the struct properly.
11354 For references (to pointers), we want to actually allocate the
11355 space for the reference itself in the sorted list following the
11356 struct node.
11357 For pointers, we want to allocate space if we had an explicit
11358 mapping of the attachment point, but not otherwise. */
11359 if (*fragile_p
11360 || (openmp
11361 && !target
11362 && attach_detach
11363 && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
11364 && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
11366 if (!lang_GNU_Fortran ())
11367 /* In Fortran, pointers are dereferenced automatically, but may
11368 be unassociated. So we still want to allocate space for the
11369 pointer (as the base for an attach operation that should be
11370 present in the same directive's clause list also). */
11371 OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
11372 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
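/* Illustratively (a sketch): on
     #pragma omp target enter data map(to: s.ptr[0:n])
   the "alloc" node created for S.PTR gets size zero here, so no
   device storage is reserved for the pointer itself; the node merely
   allows omp-low.cc to rewrite the struct mapping.  */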
11375 *insert_node_pos = l;
11377 if (reprocessing_struct)
11379 /* When reprocessing a struct node group used as the base of a
11380 subcomponent access, if we have a reference-to-pointer base,
11381 we will see:
11382 struct(**ptr) attach(*ptr)
11383 whereas for a non-reprocess-struct group, we see, e.g.:
11384 tofrom(**ptr) attach(*ptr) attach(ptr)
11385 and we create the "alloc" for the second "attach", i.e.
11386 for the reference itself. When reprocessing a struct group we
11387 thus change the pointer attachment into a reference attachment
11388 by stripping the indirection. (The attachment of the
11389 referenced pointer must happen elsewhere, either on the same
11390 directive, or otherwise.) */
11391 tree adecl = OMP_CLAUSE_DECL (alloc_node);
11393 if ((TREE_CODE (adecl) == INDIRECT_REF
11394 || (TREE_CODE (adecl) == MEM_REF
11395 && integer_zerop (TREE_OPERAND (adecl, 1))))
11396 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (adecl, 0)))
11397 == REFERENCE_TYPE)
11398 && (TREE_CODE (TREE_TYPE (TREE_TYPE
11399 (TREE_OPERAND (adecl, 0)))) == POINTER_TYPE))
11400 OMP_CLAUSE_DECL (alloc_node) = TREE_OPERAND (adecl, 0);
11402 *added_tail = tail;
11405 else
11407 gcc_assert (*grp_start_p == grp_end);
11408 if (reprocessing_struct)
11410 /* If we don't have an attach/detach node, this is a
11411 "target data" directive or similar, not an offload region.
11412 Synthesize an "alloc" node using just the initiating
11413 GOMP_MAP_STRUCT decl. */
11414 gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
11415 || code == OACC_EXIT_DATA)
11416 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
11417 tree alloc_node
11418 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
11419 OMP_CLAUSE_MAP);
11420 OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
11421 OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
11422 OMP_CLAUSE_SIZE (alloc_node)
11423 = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
11425 OMP_CLAUSE_CHAIN (alloc_node) = OMP_CLAUSE_CHAIN (l);
11426 OMP_CLAUSE_CHAIN (l) = alloc_node;
11427 *insert_node_pos = l;
11428 *added_tail = &OMP_CLAUSE_CHAIN (alloc_node);
11430 else
11431 grp_start_p = omp_siblist_insert_node_after (l, insert_node_pos);
11434 unsigned last_access = base_token + 1;
11436 while (last_access + 1 < addr_tokens.length ()
11437 && addr_tokens[last_access + 1]->type == ACCESS_METHOD)
11438 last_access++;
11440 if ((region_type & ORT_TARGET)
11441 && addr_tokens[base_token + 1]->type == ACCESS_METHOD)
11443 bool base_ref = false;
11444 access_method_kinds access_kind
11445 = addr_tokens[last_access]->u.access_kind;
11447 switch (access_kind)
11449 case ACCESS_DIRECT:
11450 case ACCESS_INDEXED_ARRAY:
11451 return NULL;
11453 case ACCESS_REF:
11454 case ACCESS_REF_TO_POINTER:
11455 case ACCESS_REF_TO_POINTER_OFFSET:
11456 case ACCESS_INDEXED_REF_TO_ARRAY:
11457 base_ref = true;
11458 break;
11460 default:
11463 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
11464 OMP_CLAUSE_MAP);
11465 enum gomp_map_kind mkind;
11466 omp_mapping_group *decl_group;
11467 tree use_base;
11468 switch (access_kind)
11470 case ACCESS_POINTER:
11471 case ACCESS_POINTER_OFFSET:
11472 use_base = addr_tokens[last_access]->expr;
11473 break;
11474 case ACCESS_REF_TO_POINTER:
11475 case ACCESS_REF_TO_POINTER_OFFSET:
11476 use_base
11477 = build_fold_indirect_ref (addr_tokens[last_access]->expr);
11478 break;
11479 default:
11480 use_base = addr_tokens[base_token]->expr;
11482 bool mapped_to_p
11483 = omp_directive_maps_explicitly (group_map, use_base, &decl_group,
11484 true, false, true);
11485 if (addr_tokens[base_token]->type == STRUCTURE_BASE
11486 && DECL_P (addr_tokens[last_access]->expr)
11487 && !mapped_to_p)
11488 mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
11489 : GOMP_MAP_FIRSTPRIVATE_POINTER;
11490 else
11491 mkind = GOMP_MAP_ATTACH_DETACH;
11493 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
11494 /* If we have a reference to pointer base, we want to attach the
11495 pointer here, not the reference. The reference attachment happens
11496 elsewhere. */
11497 bool ref_to_ptr
11498 = (access_kind == ACCESS_REF_TO_POINTER
11499 || access_kind == ACCESS_REF_TO_POINTER_OFFSET);
11500 tree sdecl = addr_tokens[last_access]->expr;
11501 tree sdecl_ptr = ref_to_ptr ? build_fold_indirect_ref (sdecl)
11502 : sdecl;
11503 /* For the FIRSTPRIVATE_REFERENCE after the struct node, we
11504 want to use the reference itself for the decl, but we
11505 still want to use the pointer to calculate the bias. */
11506 OMP_CLAUSE_DECL (c2) = (mkind == GOMP_MAP_ATTACH_DETACH)
11507 ? sdecl_ptr : sdecl;
11508 sdecl = sdecl_ptr;
11509 tree baddr = build_fold_addr_expr (base);
11510 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
11511 ptrdiff_type_node, baddr);
11512 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
11513 ptrdiff_type_node, sdecl);
11514 OMP_CLAUSE_SIZE (c2)
11515 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
11516 ptrdiff_type_node, baddr, decladdr);
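/* Illustratively, for a mapping of S.PTR[0:N] the size field of C2 now
   holds the bias (char *) &s - (char *) s.ptr, i.e. the signed distance
   from the attachment pointer's value back to the struct base.  */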
11517 /* Insert after struct node. */
11518 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
11519 OMP_CLAUSE_CHAIN (l) = c2;
11521 if (addr_tokens[base_token]->type == STRUCTURE_BASE
11522 && (addr_tokens[base_token]->u.structure_base_kind
11523 == BASE_COMPONENT_EXPR)
11524 && mkind == GOMP_MAP_ATTACH_DETACH
11525 && addr_tokens[last_access]->u.access_kind != ACCESS_REF)
11527 *inner = insert_node_pos;
11528 if (openmp)
11529 *fragile_p = true;
11530 return NULL;
11534 if (addr_tokens[base_token]->type == STRUCTURE_BASE
11535 && (addr_tokens[base_token]->u.structure_base_kind
11536 == BASE_COMPONENT_EXPR)
11537 && addr_tokens[last_access]->u.access_kind == ACCESS_REF)
11538 *inner = insert_node_pos;
11540 return NULL;
11542 else if (struct_map_to_clause)
11544 tree *osc = struct_map_to_clause->get (base);
11545 tree *sc = NULL, *scp = NULL;
11546 bool unordered = false;
11548 if (osc && OMP_CLAUSE_MAP_KIND (*osc) == GOMP_MAP_STRUCT_UNORD)
11549 unordered = true;
11551 unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
11552 sc = &OMP_CLAUSE_CHAIN (*osc);
11553 /* The struct mapping might be immediately followed by a
11554 FIRSTPRIVATE_POINTER, FIRSTPRIVATE_REFERENCE or an ATTACH_DETACH --
11555 if it's an indirect access or a reference, or if the structure base
11556 is not a decl. The FIRSTPRIVATE_* nodes are removed in omp-low.cc
11557 after they have been processed there, and ATTACH_DETACH nodes are
11558 recomputed and moved out of the GOMP_MAP_STRUCT construct once
11559 sibling list building is complete. */
11560 if (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
11561 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
11562 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_ATTACH_DETACH)
11563 sc = &OMP_CLAUSE_CHAIN (*sc);
11564 for (i = 0; i < elems; i++, sc = &OMP_CLAUSE_CHAIN (*sc))
11565 if (attach_detach && sc == grp_start_p)
11566 break;
11567 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
11568 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
11569 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
11570 break;
11571 else
11573 tree sc_decl = OMP_CLAUSE_DECL (*sc);
11574 poly_offset_int offset;
11575 poly_int64 bitpos;
11577 if (TREE_CODE (sc_decl) == ARRAY_REF)
11579 while (TREE_CODE (sc_decl) == ARRAY_REF)
11580 sc_decl = TREE_OPERAND (sc_decl, 0);
11581 if (TREE_CODE (sc_decl) != COMPONENT_REF
11582 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
11583 break;
11585 else if (INDIRECT_REF_P (sc_decl)
11586 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
11587 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
11588 == REFERENCE_TYPE))
11589 sc_decl = TREE_OPERAND (sc_decl, 0);
11591 bool variable_offset2;
11592 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
11593 &variable_offset2);
11594 if (!base2 || !operand_equal_p (base2, base, 0))
11595 break;
11596 if (scp)
11597 continue;
11598 if (variable_offset2)
11600 OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
11601 unordered = true;
11602 break;
11604 else if ((region_type & ORT_ACC) != 0)
11606 /* For OpenACC, allow (ignore) duplicate struct accesses in
11607 the middle of a mapping clause, e.g. "mystruct->foo" in:
11608 copy(mystruct->foo->bar) copy(mystruct->foo->qux). */
11609 if (reprocessing_struct
11610 && known_eq (coffset, offset)
11611 && known_eq (cbitpos, bitpos))
11612 return NULL;
11614 else if (known_eq (coffset, offset)
11615 && known_eq (cbitpos, bitpos))
11617 /* Having two struct members at the same offset doesn't work,
11618 so make sure we don't add a second one here.  (We're allowed
11619 to ignore this.  Should we report the error?) */
11620 /*error_at (OMP_CLAUSE_LOCATION (grp_end),
11621 "duplicate struct member %qE in map clauses",
11622 OMP_CLAUSE_DECL (grp_end));*/
11623 return NULL;
11625 if (maybe_lt (coffset, offset)
11626 || (known_eq (coffset, offset)
11627 && maybe_lt (cbitpos, bitpos)))
11629 if (attach_detach)
11630 scp = sc;
11631 else
11632 break;
11636 /* If this is an unordered struct, just insert the new element at the
11637 end of the list. */
11638 if (unordered)
11640 for (; i < elems; i++)
11641 sc = &OMP_CLAUSE_CHAIN (*sc);
11642 scp = NULL;
11645 OMP_CLAUSE_SIZE (*osc)
11646 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
11648 if (reprocessing_struct)
11650 /* If we're reprocessing a struct node, we don't want to do most of
11651 the list manipulation below. We only need to handle the (pointer
11652 or reference) attach/detach case. */
11653 tree extra_node, alloc_node;
11654 if (has_descriptor)
11655 gcc_unreachable ();
11656 else if (attach_detach)
11657 alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
11658 grp_end, &extra_node);
11659 else
11661 /* If we don't have an attach/detach node, this is a
11662 "target data" directive or similar, not an offload region.
11663 Synthesize an "alloc" node using just the initiating
11664 GOMP_MAP_STRUCT decl. */
11665 gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
11666 || code == OACC_EXIT_DATA)
11667 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
11668 alloc_node
11669 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
11670 OMP_CLAUSE_MAP);
11671 OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
11672 OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
11673 OMP_CLAUSE_SIZE (alloc_node)
11674 = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
11677 if (scp)
11678 omp_siblist_insert_node_after (alloc_node, scp);
11679 else
11681 tree *new_end = omp_siblist_insert_node_after (alloc_node, sc);
11682 if (sc == *added_tail)
11683 *added_tail = new_end;
11686 return NULL;
11689 if (has_descriptor)
11691 tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
11692 if (code == OMP_TARGET_EXIT_DATA
11693 || code == OACC_EXIT_DATA)
11694 OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
11695 omp_siblist_move_node_after (desc,
11696 &OMP_CLAUSE_CHAIN (*grp_start_p),
11697 scp ? scp : sc);
11699 else if (attach_detach)
11701 tree cl = NULL_TREE, extra_node;
11702 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
11703 grp_end, &extra_node);
11704 tree *tail_chain = NULL;
11706 if (*fragile_p
11707 || (openmp
11708 && !target
11709 && attach_detach
11710 && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
11711 && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
11713 if (!lang_GNU_Fortran ())
11714 OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
11715 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
11718 /* Here, we have:
11720 grp_end : the last (or only) node in this group.
11721 grp_start_p : pointer to the first node in a pointer mapping group
11722 up to and including GRP_END.
11723 sc : pointer to the chain for the end of the struct component
11724 list.
11725 scp : pointer to the chain for the sorted position at which we
11726 should insert in the middle of the struct component list
11727 (else NULL to insert at end).
11728 alloc_node : the "alloc" node for the structure (pointer-type)
11729 component. We insert at SCP (if present), else SC
11730 (the end of the struct component list).
11731 extra_node : a newly-synthesized node for an additional indirect
11732 pointer mapping or a Fortran pointer set, if needed.
11733 cl : first node to prepend before grp_start_p.
11734 tail_chain : pointer to chain of last prepended node.
11736 The general idea is we move the nodes for this struct mapping
11737 together: the alloc node goes into the sorted list directly after
11738 the struct mapping, and any extra nodes (together with the nodes
11739 mapping arrays pointed to by struct components) get moved after
11740 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
11741 the end of the struct component mapping list. It's important that
11742 the alloc_node comes first in that case because it's part of the
11743 sorted component mapping list (but subsequent nodes are not!). */
11745 if (scp)
11746 omp_siblist_insert_node_after (alloc_node, scp);
11748 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
11749 already inserted it) and the extra_node (if it is present). The
11750 list can be empty if we added alloc_node above and there is no
11751 extra node. */
11752 if (scp && extra_node)
11754 cl = extra_node;
11755 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
11757 else if (extra_node)
11759 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
11760 cl = alloc_node;
11761 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
11763 else if (!scp)
11765 cl = alloc_node;
11766 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
11769 continue_at
11770 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
11771 grp_start_p, grp_end,
11773 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
11775 else if (*sc != grp_end)
11777 gcc_assert (*grp_start_p == grp_end);
11779 /* We are moving the current node back to a previous struct node:
11780 the node that used to point to the current node will now point to
11781 the next node. */
11782 continue_at = grp_start_p;
11783 /* In the non-pointer case, the mapping clause itself is moved into
11784 the correct position in the struct component list, which in this
11785 case is just SC. */
11786 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
11789 return continue_at;
11792 /* Scan through GROUPS, and create sorted structure sibling lists without
11793 gimplifying. */
11795 static bool
11796 omp_build_struct_sibling_lists (enum tree_code code,
11797 enum omp_region_type region_type,
11798 vec<omp_mapping_group> *groups,
11799 hash_map<tree_operand_hash_no_se,
11800 omp_mapping_group *> **grpmap,
11801 tree *list_p)
11803 using namespace omp_addr_tokenizer;
11804 unsigned i;
11805 omp_mapping_group *grp;
11806 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
11807 bool success = true;
11808 tree *new_next = NULL;
11809 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
11810 tree added_nodes = NULL_TREE;
11811 tree *added_tail = &added_nodes;
11812 auto_vec<omp_mapping_group> pre_hwm_groups;
11814 FOR_EACH_VEC_ELT (*groups, i, grp)
11816 tree c = grp->grp_end;
11817 tree decl = OMP_CLAUSE_DECL (c);
11818 tree grp_end = grp->grp_end;
11819 auto_vec<omp_addr_token *> addr_tokens;
11820 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
11822 if (new_next && !grp->reprocess_struct)
11823 grp->grp_start = new_next;
11825 new_next = NULL;
11827 tree *grp_start_p = grp->grp_start;
11829 if (DECL_P (decl))
11830 continue;
11832 /* Skip groups we marked for deletion in
11833 {omp,oacc}_resolve_clause_dependencies. */
11834 if (grp->deleted)
11835 continue;
11837 if (OMP_CLAUSE_CHAIN (*grp_start_p)
11838 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
11840 /* Don't process an array descriptor that isn't inside a derived type
11841 as a struct (the GOMP_MAP_POINTER following will have the form
11842 "var.data", but such mappings are handled specially). */
11843 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
11844 if (omp_map_clause_descriptor_p (grpmid)
11845 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
11846 continue;
11849 tree expr = decl;
11851 while (TREE_CODE (expr) == ARRAY_REF)
11852 expr = TREE_OPERAND (expr, 0);
11854 if (!omp_parse_expr (addr_tokens, expr))
11855 continue;
11857 omp_addr_token *last_token
11858 = omp_first_chained_access_token (addr_tokens);
11860 /* A mapping of a reference to a pointer member that doesn't specify an
11861 array section, etc., like this:
11862 *mystruct.ref_to_ptr
11863 should not be processed by the struct sibling-list handling code --
11864 it just transfers the referenced pointer.
11866 In contrast, the quite similar-looking construct:
11867 *mystruct.ptr
11868 which is equivalent to e.g.
11869 mystruct.ptr[0]
11870 *does* trigger sibling-list processing.
11872 An exception for the former case is for "fragile" groups where the
11873 reference itself is not handled otherwise; this is subject to special
11874 handling in omp_accumulate_sibling_list also. */
11876 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11877 && last_token->type == ACCESS_METHOD
11878 && last_token->u.access_kind == ACCESS_REF
11879 && !grp->fragile)
11880 continue;
11882 tree d = decl;
11883 if (TREE_CODE (d) == ARRAY_REF)
11885 while (TREE_CODE (d) == ARRAY_REF)
11886 d = TREE_OPERAND (d, 0);
11887 if (TREE_CODE (d) == COMPONENT_REF
11888 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11889 decl = d;
11891 if (d == decl
11892 && INDIRECT_REF_P (decl)
11893 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11894 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11895 == REFERENCE_TYPE)
11896 && (OMP_CLAUSE_MAP_KIND (c)
11897 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11898 decl = TREE_OPERAND (decl, 0);
11900 STRIP_NOPS (decl);
11902 if (TREE_CODE (decl) != COMPONENT_REF)
11903 continue;
11905 /* If we're mapping the whole struct in another node, skip adding this
11906 node to a sibling list. */
11907 omp_mapping_group *wholestruct;
11908 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
11909 &wholestruct))
11910 continue;
11912 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
11913 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
11914 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
11915 && code != OACC_UPDATE
11916 && code != OMP_TARGET_UPDATE)
11918 if (error_operand_p (decl))
11920 success = false;
11921 goto error_out;
11924 tree stype = TREE_TYPE (decl);
11925 if (TREE_CODE (stype) == REFERENCE_TYPE)
11926 stype = TREE_TYPE (stype);
11927 if (TYPE_SIZE_UNIT (stype) == NULL
11928 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
11930 error_at (OMP_CLAUSE_LOCATION (c),
11931 "mapping field %qE of variable length "
11932 "structure", OMP_CLAUSE_DECL (c));
11933 success = false;
11934 goto error_out;
11937 tree *inner = NULL;
11938 bool fragile_p = grp->fragile;
11940 new_next
11941 = omp_accumulate_sibling_list (region_type, code,
11942 struct_map_to_clause, *grpmap,
11943 grp_start_p, grp_end, addr_tokens,
11944 &inner, &fragile_p,
11945 grp->reprocess_struct, &added_tail);
11947 if (inner)
11949 omp_mapping_group newgrp;
11950 newgrp.grp_start = inner;
11951 if (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (*inner))
11952 == GOMP_MAP_ATTACH_DETACH)
11953 newgrp.grp_end = OMP_CLAUSE_CHAIN (*inner);
11954 else
11955 newgrp.grp_end = *inner;
11956 newgrp.mark = UNVISITED;
11957 newgrp.sibling = NULL;
11958 newgrp.deleted = false;
11959 newgrp.reprocess_struct = true;
11960 newgrp.fragile = fragile_p;
11961 newgrp.next = NULL;
11962 groups->safe_push (newgrp);
11964 /* !!! Growing GROUPS might invalidate the pointers in the group
11965 map. Rebuild it here. This is a bit inefficient, but
11966 shouldn't happen very often. */
11967 delete (*grpmap);
11968 *grpmap
11969 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
11970 sentinel);
11975 /* Delete groups marked for deletion above. At this point the order of the
11976 groups may no longer correspond to the order of the underlying list,
11977 which complicates this a little. First clear out OMP_CLAUSE_DECL for
11978 deleted nodes... */
11980 FOR_EACH_VEC_ELT (*groups, i, grp)
11981 if (grp->deleted)
11982 for (tree d = *grp->grp_start;
11983 d != OMP_CLAUSE_CHAIN (grp->grp_end);
11984 d = OMP_CLAUSE_CHAIN (d))
11985 OMP_CLAUSE_DECL (d) = NULL_TREE;
11987 /* ...then sweep through the list removing the now-empty nodes. */
11989 tail = list_p;
11990 while (*tail)
11992 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
11993 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
11994 *tail = OMP_CLAUSE_CHAIN (*tail);
11995 else
11996 tail = &OMP_CLAUSE_CHAIN (*tail);
11999 /* Tack on the struct nodes added during nested struct reprocessing. */
12000 if (added_nodes)
12002 *tail = added_nodes;
12003 tail = added_tail;
12006 /* Now that we have finished building the struct sibling lists, reprocess
12007 newly-added "attach" nodes: we need the address of the first
12008 mapped element of each struct sibling list for the bias of the attach
12009 operation -- not necessarily the base address of the whole struct. */
12010 if (struct_map_to_clause)
12011 for (hash_map<tree_operand_hash, tree>::iterator iter
12012 = struct_map_to_clause->begin ();
12013 iter != struct_map_to_clause->end ();
12014 ++iter)
12016 tree struct_node = (*iter).second;
12017 gcc_assert (OMP_CLAUSE_CODE (struct_node) == OMP_CLAUSE_MAP);
12018 tree attach = OMP_CLAUSE_CHAIN (struct_node);
12020 if (OMP_CLAUSE_CODE (attach) != OMP_CLAUSE_MAP
12021 || OMP_CLAUSE_MAP_KIND (attach) != GOMP_MAP_ATTACH_DETACH)
12022 continue;
12024 OMP_CLAUSE_SET_MAP_KIND (attach, GOMP_MAP_ATTACH);
12026 /* Sanity check: the standalone attach node will not work if we have
12027 an "enter data" operation (because for those, variables need to be
12028 mapped separately and attach nodes must be grouped together with the
12029 base they attach to). We should only have created the
12030 ATTACH_DETACH node after GOMP_MAP_STRUCT for a target region, so
12031 this should never be true. */
12032 gcc_assert ((region_type & ORT_TARGET) != 0);
12034 /* This is the first sorted node in the struct sibling list.  Use it
12035 to recalculate the correct bias, i.e.
12036 (&first_node - attach_decl).
12037 For GOMP_MAP_STRUCT_UNORD, we need e.g. the
12038 min(min(min(first,second),third),fourth) element, because the
12039 elements aren't in any particular order. */
12040 tree lowest_addr;
12041 if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
12043 tree first_node = OMP_CLAUSE_CHAIN (attach);
12044 unsigned HOST_WIDE_INT num_mappings
12045 = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
12046 lowest_addr = OMP_CLAUSE_DECL (first_node);
12047 lowest_addr = build_fold_addr_expr (lowest_addr);
12048 lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
12049 tree next_node = OMP_CLAUSE_CHAIN (first_node);
12050 while (num_mappings > 1)
12052 tree tmp = OMP_CLAUSE_DECL (next_node);
12053 tmp = build_fold_addr_expr (tmp);
12054 tmp = fold_convert (pointer_sized_int_node, tmp);
12055 lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
12056 lowest_addr, tmp);
12057 next_node = OMP_CLAUSE_CHAIN (next_node);
12058 num_mappings--;
12060 lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
12062 else
12064 tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
12065 first_node = build_fold_addr_expr (first_node);
12066 lowest_addr = fold_convert (ptrdiff_type_node, first_node);
12068 tree attach_decl = OMP_CLAUSE_DECL (attach);
12069 attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
12070 OMP_CLAUSE_SIZE (attach)
12071 = fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
12072 attach_decl);
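/* Illustratively: when P->X and P->Y are mapped and X is the member at
   the lowest offset, the recomputed bias is
   (char *) &p->x - (char *) p, rather than being measured from the
   start of the whole structure.  */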
12074 /* Remove GOMP_MAP_ATTACH node from after struct node. */
12075 OMP_CLAUSE_CHAIN (struct_node) = OMP_CLAUSE_CHAIN (attach);
12076 /* ...and re-insert it at the end of our clause list. */
12077 *tail = attach;
12078 OMP_CLAUSE_CHAIN (attach) = NULL_TREE;
12079 tail = &OMP_CLAUSE_CHAIN (attach);
12082 error_out:
12083 if (struct_map_to_clause)
12084 delete struct_map_to_clause;
12086 return success;
12089 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
12090 omp context and into previous (enclosing) omp contexts. */
12092 static void
12093 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
12094 enum omp_region_type region_type,
12095 enum tree_code code)
12097 using namespace omp_addr_tokenizer;
12098 struct gimplify_omp_ctx *ctx, *outer_ctx;
12099 tree c;
12100 tree *orig_list_p = list_p;
12101 int handled_depend_iterators = -1;
12102 int nowait = -1;
12104 ctx = new_omp_context (region_type);
12105 ctx->code = code;
12106 outer_ctx = ctx->outer_context;
12107 if (code == OMP_TARGET)
12109 if (!lang_GNU_Fortran ())
12110 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
12111 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
12112 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
12113 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
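/* Illustratively, for C/C++ code such as
     int n; int *p;
     #pragma omp target
   N defaults to firstprivate while P gets an implicit zero-length
   array section mapping, per the defaultmap settings above.  */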
12115 if (!lang_GNU_Fortran ())
12116 switch (code)
12118 case OMP_TARGET:
12119 case OMP_TARGET_DATA:
12120 case OMP_TARGET_ENTER_DATA:
12121 case OMP_TARGET_EXIT_DATA:
12122 case OACC_DECLARE:
12123 case OACC_HOST_DATA:
12124 case OACC_PARALLEL:
12125 case OACC_KERNELS:
12126 ctx->target_firstprivatize_array_bases = true;
12127 default:
12128 break;
12131 vec<omp_mapping_group> *groups = NULL;
12132 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
12133 unsigned grpnum = 0;
12134 tree *grp_start_p = NULL, grp_end = NULL_TREE;
12136 if (code == OMP_TARGET
12137 || code == OMP_TARGET_DATA
12138 || code == OMP_TARGET_ENTER_DATA
12139 || code == OMP_TARGET_EXIT_DATA
12140 || code == OACC_DATA
12141 || code == OACC_KERNELS
12142 || code == OACC_PARALLEL
12143 || code == OACC_SERIAL
12144 || code == OACC_ENTER_DATA
12145 || code == OACC_EXIT_DATA
12146 || code == OACC_UPDATE
12147 || code == OACC_DECLARE)
12149 groups = omp_gather_mapping_groups (list_p);
12151 if (groups)
12152 grpmap = omp_index_mapping_groups (groups);
12155 while ((c = *list_p) != NULL)
12157 bool remove = false;
12158 bool notice_outer = true;
12159 bool map_descriptor;
12160 const char *check_non_private = NULL;
12161 unsigned int flags;
12162 tree decl;
12163 auto_vec<omp_addr_token *, 10> addr_tokens;
12165 if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
12167 grp_start_p = NULL;
12168 grp_end = NULL_TREE;
12171 switch (OMP_CLAUSE_CODE (c))
12173 case OMP_CLAUSE_PRIVATE:
12174 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
12175 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
12177 flags |= GOVD_PRIVATE_OUTER_REF;
12178 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
12180 else
12181 notice_outer = false;
12182 goto do_add;
12183 case OMP_CLAUSE_SHARED:
12184 flags = GOVD_SHARED | GOVD_EXPLICIT;
12185 goto do_add;
12186 case OMP_CLAUSE_FIRSTPRIVATE:
12187 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
12188 check_non_private = "firstprivate";
12189 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12191 gcc_assert (code == OMP_TARGET);
12192 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
12194 goto do_add;
12195 case OMP_CLAUSE_LASTPRIVATE:
12196 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12197 switch (code)
12199 case OMP_DISTRIBUTE:
12200 error_at (OMP_CLAUSE_LOCATION (c),
12201 "conditional %<lastprivate%> clause on "
12202 "%qs construct", "distribute");
12203 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12204 break;
12205 case OMP_TASKLOOP:
12206 error_at (OMP_CLAUSE_LOCATION (c),
12207 "conditional %<lastprivate%> clause on "
12208 "%qs construct", "taskloop");
12209 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12210 break;
12211 default:
12212 break;
12214 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
12215 if (code != OMP_LOOP)
12216 check_non_private = "lastprivate";
12217 decl = OMP_CLAUSE_DECL (c);
12218 if (error_operand_p (decl))
12219 goto do_add;
12220 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
12221 && !lang_hooks.decls.omp_scalar_p (decl, true))
12223 error_at (OMP_CLAUSE_LOCATION (c),
12224 "non-scalar variable %qD in conditional "
12225 "%<lastprivate%> clause", decl);
12226 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
12228 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12229 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
12230 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
12231 false);
12232 goto do_add;
12233 case OMP_CLAUSE_REDUCTION:
12234 if (OMP_CLAUSE_REDUCTION_TASK (c))
12236 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
12238 if (nowait == -1)
12239 nowait = omp_find_clause (*list_p,
12240 OMP_CLAUSE_NOWAIT) != NULL_TREE;
12241 if (nowait
12242 && (outer_ctx == NULL
12243 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
12245 error_at (OMP_CLAUSE_LOCATION (c),
12246 "%<task%> reduction modifier on a construct "
12247 "with a %<nowait%> clause");
12248 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
12251 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
12253 error_at (OMP_CLAUSE_LOCATION (c),
12254 "invalid %<task%> reduction modifier on construct "
12255 "other than %<parallel%>, %qs, %<sections%> or "
12256 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
12257 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
12260 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12261 switch (code)
12263 case OMP_SECTIONS:
12264 error_at (OMP_CLAUSE_LOCATION (c),
12265 "%<inscan%> %<reduction%> clause on "
12266 "%qs construct", "sections");
12267 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12268 break;
12269 case OMP_PARALLEL:
12270 error_at (OMP_CLAUSE_LOCATION (c),
12271 "%<inscan%> %<reduction%> clause on "
12272 "%qs construct", "parallel");
12273 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12274 break;
12275 case OMP_TEAMS:
12276 error_at (OMP_CLAUSE_LOCATION (c),
12277 "%<inscan%> %<reduction%> clause on "
12278 "%qs construct", "teams");
12279 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12280 break;
12281 case OMP_TASKLOOP:
12282 error_at (OMP_CLAUSE_LOCATION (c),
12283 "%<inscan%> %<reduction%> clause on "
12284 "%qs construct", "taskloop");
12285 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12286 break;
12287 case OMP_SCOPE:
12288 error_at (OMP_CLAUSE_LOCATION (c),
12289 "%<inscan%> %<reduction%> clause on "
12290 "%qs construct", "scope");
12291 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
12292 break;
12293 default:
12294 break;
12296 /* FALLTHRU */
12297 case OMP_CLAUSE_IN_REDUCTION:
12298 case OMP_CLAUSE_TASK_REDUCTION:
12299 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
12300 /* OpenACC permits reductions on private variables. */
12301 if (!(region_type & ORT_ACC)
12302 /* taskgroup is actually not a worksharing region. */
12303 && code != OMP_TASKGROUP)
12304 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
12305 decl = OMP_CLAUSE_DECL (c);
12306 if (TREE_CODE (decl) == MEM_REF)
12308 tree type = TREE_TYPE (decl);
12309 bool saved_into_ssa = gimplify_ctxp->into_ssa;
12310 gimplify_ctxp->into_ssa = false;
12311 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
12312 NULL, is_gimple_val, fb_rvalue, false)
12313 == GS_ERROR)
12315 gimplify_ctxp->into_ssa = saved_into_ssa;
12316 remove = true;
12317 break;
12319 gimplify_ctxp->into_ssa = saved_into_ssa;
12320 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12321 if (DECL_P (v))
12323 omp_firstprivatize_variable (ctx, v);
12324 omp_notice_variable (ctx, v, true);
12326 decl = TREE_OPERAND (decl, 0);
12327 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12329 gimplify_ctxp->into_ssa = false;
12330 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
12331 NULL, is_gimple_val, fb_rvalue, false)
12332 == GS_ERROR)
12334 gimplify_ctxp->into_ssa = saved_into_ssa;
12335 remove = true;
12336 break;
12338 gimplify_ctxp->into_ssa = saved_into_ssa;
12339 v = TREE_OPERAND (decl, 1);
12340 if (DECL_P (v))
12342 omp_firstprivatize_variable (ctx, v);
12343 omp_notice_variable (ctx, v, true);
12345 decl = TREE_OPERAND (decl, 0);
12347 if (TREE_CODE (decl) == ADDR_EXPR
12348 || TREE_CODE (decl) == INDIRECT_REF)
12349 decl = TREE_OPERAND (decl, 0);
12351 goto do_add_decl;
12352 case OMP_CLAUSE_LINEAR:
12353 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
12354 is_gimple_val, fb_rvalue) == GS_ERROR)
12356 remove = true;
12357 break;
12359 else
12361 if (code == OMP_SIMD
12362 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
12364 struct gimplify_omp_ctx *octx = outer_ctx;
12365 if (octx
12366 && octx->region_type == ORT_WORKSHARE
12367 && octx->combined_loop
12368 && !octx->distribute)
12370 if (octx->outer_context
12371 && (octx->outer_context->region_type
12372 == ORT_COMBINED_PARALLEL))
12373 octx = octx->outer_context->outer_context;
12374 else
12375 octx = octx->outer_context;
12377 if (octx
12378 && octx->region_type == ORT_WORKSHARE
12379 && octx->combined_loop
12380 && octx->distribute)
12382 error_at (OMP_CLAUSE_LOCATION (c),
12383 "%<linear%> clause for variable other than "
12384 "loop iterator specified on construct "
12385 "combined with %<distribute%>");
12386 remove = true;
12387 break;
12390 /* For combined #pragma omp parallel for simd, need to put
12391 lastprivate and perhaps firstprivate too on the
12392 parallel. Similarly for #pragma omp for simd. */
12393 struct gimplify_omp_ctx *octx = outer_ctx;
12394 bool taskloop_seen = false;
12395 decl = NULL_TREE;
12398 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12399 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12400 break;
12401 decl = OMP_CLAUSE_DECL (c);
12402 if (error_operand_p (decl))
12404 decl = NULL_TREE;
12405 break;
12407 flags = GOVD_SEEN;
12408 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
12409 flags |= GOVD_FIRSTPRIVATE;
12410 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12411 flags |= GOVD_LASTPRIVATE;
12412 if (octx
12413 && octx->region_type == ORT_WORKSHARE
12414 && octx->combined_loop)
12416 if (octx->outer_context
12417 && (octx->outer_context->region_type
12418 == ORT_COMBINED_PARALLEL))
12419 octx = octx->outer_context;
12420 else if (omp_check_private (octx, decl, false))
12421 break;
12423 else if (octx
12424 && (octx->region_type & ORT_TASK) != 0
12425 && octx->combined_loop)
12426 taskloop_seen = true;
12427 else if (octx
12428 && octx->region_type == ORT_COMBINED_PARALLEL
12429 && ((ctx->region_type == ORT_WORKSHARE
12430 && octx == outer_ctx)
12431 || taskloop_seen))
12432 flags = GOVD_SEEN | GOVD_SHARED;
12433 else if (octx
12434 && ((octx->region_type & ORT_COMBINED_TEAMS)
12435 == ORT_COMBINED_TEAMS))
12436 flags = GOVD_SEEN | GOVD_SHARED;
12437 else if (octx
12438 && octx->region_type == ORT_COMBINED_TARGET)
12440 if (flags & GOVD_LASTPRIVATE)
12441 flags = GOVD_SEEN | GOVD_MAP;
12443 else
12444 break;
12445 splay_tree_node on
12446 = splay_tree_lookup (octx->variables,
12447 (splay_tree_key) decl);
12448 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
12450 octx = NULL;
12451 break;
12453 omp_add_variable (octx, decl, flags);
12454 if (octx->outer_context == NULL)
12455 break;
12456 octx = octx->outer_context;
12458 while (1);
12459 if (octx
12460 && decl
12461 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12462 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12463 omp_notice_variable (octx, decl, true);
12465 flags = GOVD_LINEAR | GOVD_EXPLICIT;
12466 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
12467 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12469 notice_outer = false;
12470 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12472 goto do_add;
12474 case OMP_CLAUSE_MAP:
12475 if (!grp_start_p)
12477 grp_start_p = list_p;
12478 grp_end = (*groups)[grpnum].grp_end;
12479 grpnum++;
12481 decl = OMP_CLAUSE_DECL (c);
12483 if (error_operand_p (decl))
12485 remove = true;
12486 break;
12489 if (!omp_parse_expr (addr_tokens, decl))
12491 remove = true;
12492 break;
12495 if (remove)
12496 break;
12497 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
12499 struct gimplify_omp_ctx *octx;
12500 for (octx = outer_ctx; octx; octx = octx->outer_context)
12502 if (octx->region_type != ORT_ACC_HOST_DATA)
12503 break;
12504 splay_tree_node n2
12505 = splay_tree_lookup (octx->variables,
12506 (splay_tree_key) decl);
12507 if (n2)
12508 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
12509 "declared in enclosing %<host_data%> region",
12510 DECL_NAME (decl));
12514 map_descriptor = false;
12516 /* This condition checks if we're mapping an array descriptor that
12517 isn't inside a derived type -- these have special handling, and
12518 are not handled as structs in omp_build_struct_sibling_lists.
12519 See that function for further details. */
12520 if (*grp_start_p != grp_end
12521 && OMP_CLAUSE_CHAIN (*grp_start_p)
12522 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
12524 tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
12525 if (omp_map_clause_descriptor_p (grp_mid)
12526 && DECL_P (OMP_CLAUSE_DECL (grp_mid)))
12527 map_descriptor = true;
12529 else if (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP
12530 && (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_RELEASE
12531 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DELETE)
12532 && OMP_CLAUSE_RELEASE_DESCRIPTOR (grp_end))
12533 map_descriptor = true;
12535 /* Adding the decl for a struct access: we haven't created
12536 GOMP_MAP_STRUCT nodes yet, so this statement needs to predict
12537 whether they will be created in gimplify_adjust_omp_clauses.
12538 NOTE: Technically we should probably look through DECL_VALUE_EXPR
12539 here because something that looks like a DECL_P may actually be a
12540 struct access, e.g. variables in a lambda closure
12541 (__closure->__foo) or class members (this->foo). Currently in both
12542 those cases we map the whole of the containing object (directly in
12543 the C++ FE) though, so struct nodes are not created. */
12544 if (c == grp_end
12545 && addr_tokens[0]->type == STRUCTURE_BASE
12546 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12547 && !map_descriptor)
12549 gcc_assert (addr_tokens[1]->type == ACCESS_METHOD);
12550 /* If we got to this struct via a chain of pointers, maybe we
12551 want to map it implicitly instead. */
12552 if (omp_access_chain_p (addr_tokens, 1))
12553 break;
12554 omp_mapping_group *wholestruct;
12555 if (!(region_type & ORT_ACC)
12556 && omp_mapped_by_containing_struct (grpmap,
12557 OMP_CLAUSE_DECL (c),
12558 &wholestruct))
12559 break;
12560 decl = addr_tokens[1]->expr;
12561 if (splay_tree_lookup (ctx->variables, (splay_tree_key) decl))
12562 break;
12563 /* Standalone attach or detach clauses for a struct element
12564 should not inhibit implicit mapping of the whole struct. */
12565 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12566 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12567 break;
12568 flags = GOVD_MAP | GOVD_EXPLICIT;
12570 gcc_assert (addr_tokens[1]->u.access_kind != ACCESS_DIRECT
12571 || TREE_ADDRESSABLE (decl));
12572 goto do_add_decl;
12575 if (!DECL_P (decl))
12577 tree d = decl, *pd;
12578 if (TREE_CODE (d) == ARRAY_REF)
12580 while (TREE_CODE (d) == ARRAY_REF)
12581 d = TREE_OPERAND (d, 0);
12582 if (TREE_CODE (d) == COMPONENT_REF
12583 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
12584 decl = d;
12586 pd = &OMP_CLAUSE_DECL (c);
12587 if (d == decl
12588 && TREE_CODE (decl) == INDIRECT_REF
12589 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12590 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12591 == REFERENCE_TYPE)
12592 && (OMP_CLAUSE_MAP_KIND (c)
12593 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
12595 pd = &TREE_OPERAND (decl, 0);
12596 decl = TREE_OPERAND (decl, 0);
12599 if (addr_tokens[0]->type == STRUCTURE_BASE
12600 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12601 && addr_tokens[1]->type == ACCESS_METHOD
12602 && (addr_tokens[1]->u.access_kind == ACCESS_POINTER
12603 || (addr_tokens[1]->u.access_kind
12604 == ACCESS_POINTER_OFFSET))
12605 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)))
12607 tree base = addr_tokens[1]->expr;
12608 splay_tree_node n
12609 = splay_tree_lookup (ctx->variables,
12610 (splay_tree_key) base);
12611 n->value |= GOVD_SEEN;
12614 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
12616 /* Don't gimplify *pd fully at this point, as the base
12617 will need to be adjusted during omp lowering. */
12618 auto_vec<tree, 10> expr_stack;
12619 tree *p = pd;
12620 while (handled_component_p (*p)
12621 || TREE_CODE (*p) == INDIRECT_REF
12622 || TREE_CODE (*p) == ADDR_EXPR
12623 || TREE_CODE (*p) == MEM_REF
12624 || TREE_CODE (*p) == NON_LVALUE_EXPR)
12626 expr_stack.safe_push (*p);
12627 p = &TREE_OPERAND (*p, 0);
12629 for (int i = expr_stack.length () - 1; i >= 0; i--)
12631 tree t = expr_stack[i];
12632 if (TREE_CODE (t) == ARRAY_REF
12633 || TREE_CODE (t) == ARRAY_RANGE_REF)
12635 if (TREE_OPERAND (t, 2) == NULL_TREE)
12637 tree low = unshare_expr (array_ref_low_bound (t));
12638 if (!is_gimple_min_invariant (low))
12640 TREE_OPERAND (t, 2) = low;
12641 if (gimplify_expr (&TREE_OPERAND (t, 2),
12642 pre_p, NULL,
12643 is_gimple_reg,
12644 fb_rvalue) == GS_ERROR)
12645 remove = true;
12648 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
12649 NULL, is_gimple_reg,
12650 fb_rvalue) == GS_ERROR)
12651 remove = true;
12652 if (TREE_OPERAND (t, 3) == NULL_TREE)
12654 tree elmt_size = array_ref_element_size (t);
12655 if (!is_gimple_min_invariant (elmt_size))
12657 elmt_size = unshare_expr (elmt_size);
12658 tree elmt_type
12659 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
12660 0)));
12661 tree factor
12662 = size_int (TYPE_ALIGN_UNIT (elmt_type));
12663 elmt_size
12664 = size_binop (EXACT_DIV_EXPR, elmt_size,
12665 factor);
12666 TREE_OPERAND (t, 3) = elmt_size;
12667 if (gimplify_expr (&TREE_OPERAND (t, 3),
12668 pre_p, NULL,
12669 is_gimple_reg,
12670 fb_rvalue) == GS_ERROR)
12671 remove = true;
12674 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
12675 NULL, is_gimple_reg,
12676 fb_rvalue) == GS_ERROR)
12677 remove = true;
12679 else if (TREE_CODE (t) == COMPONENT_REF)
12681 if (TREE_OPERAND (t, 2) == NULL_TREE)
12683 tree offset = component_ref_field_offset (t);
12684 if (!is_gimple_min_invariant (offset))
12686 offset = unshare_expr (offset);
12687 tree field = TREE_OPERAND (t, 1);
12688 tree factor
12689 = size_int (DECL_OFFSET_ALIGN (field)
12690 / BITS_PER_UNIT);
12691 offset = size_binop (EXACT_DIV_EXPR, offset,
12692 factor);
12693 TREE_OPERAND (t, 2) = offset;
12694 if (gimplify_expr (&TREE_OPERAND (t, 2),
12695 pre_p, NULL,
12696 is_gimple_reg,
12697 fb_rvalue) == GS_ERROR)
12698 remove = true;
12701 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
12702 NULL, is_gimple_reg,
12703 fb_rvalue) == GS_ERROR)
12704 remove = true;
12707 for (; expr_stack.length () > 0; )
12709 tree t = expr_stack.pop ();
12711 if (TREE_CODE (t) == ARRAY_REF
12712 || TREE_CODE (t) == ARRAY_RANGE_REF)
12714 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
12715 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
12716 NULL, is_gimple_val,
12717 fb_rvalue) == GS_ERROR)
12718 remove = true;
12722 break;
12725 if ((code == OMP_TARGET
12726 || code == OMP_TARGET_DATA
12727 || code == OMP_TARGET_ENTER_DATA
12728 || code == OMP_TARGET_EXIT_DATA)
12729 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
12731 /* If we have attach/detach but the decl we have is a pointer to
12732 pointer, we're probably mapping the "base level" array
12733 implicitly. Make sure we don't add the decl as if we mapped
12734 it explicitly. That is,
12736 int **arr;
12737 [...]
12738 #pragma omp target map(arr[a][b:c])
12740 should *not* map "arr" explicitly. That way we get a
12741 zero-length "alloc" mapping for it, and, assuming it has been
12742 mapped by some previous directive, things work as they
12743 should. */
12745 tree basetype = TREE_TYPE (addr_tokens[0]->expr);
12747 if (TREE_CODE (basetype) == REFERENCE_TYPE)
12748 basetype = TREE_TYPE (basetype);
12750 if (code == OMP_TARGET
12751 && addr_tokens[0]->type == ARRAY_BASE
12752 && addr_tokens[0]->u.structure_base_kind == BASE_DECL
12753 && TREE_CODE (basetype) == POINTER_TYPE
12754 && TREE_CODE (TREE_TYPE (basetype)) == POINTER_TYPE)
12755 break;
12758 flags = GOVD_MAP | GOVD_EXPLICIT;
12759 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
12760 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM
12761 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TO
12762 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TOFROM)
12763 flags |= GOVD_MAP_ALWAYS_TO;
12765 goto do_add;
12767 case OMP_CLAUSE_AFFINITY:
12768 gimplify_omp_affinity (list_p, pre_p);
12769 remove = true;
12770 break;
12771 case OMP_CLAUSE_DOACROSS:
12772 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
12774 tree deps = OMP_CLAUSE_DECL (c);
12775 while (deps && TREE_CODE (deps) == TREE_LIST)
12777 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
12778 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
12779 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
12780 pre_p, NULL, is_gimple_val, fb_rvalue);
12781 deps = TREE_CHAIN (deps);
12784 else
12785 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
12786 == OMP_CLAUSE_DOACROSS_SOURCE);
12787 break;
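 /* For illustration, the doacross shape handled above (a user-level
    sketch, not taken from this file):

       #pragma omp for ordered (1)
       for (i = 1; i < n; i++)
         {
           #pragma omp ordered doacross (sink: i - 1)
           a[i] += a[i - 1];
           #pragma omp ordered doacross (source:)
         }

    The sink offsets are what end up in the TREE_LIST walked above. */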
12788 case OMP_CLAUSE_DEPEND:
12789 if (handled_depend_iterators == -1)
12790 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
12791 if (handled_depend_iterators)
12793 if (handled_depend_iterators == 2)
12794 remove = true;
12795 break;
12797 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
12799 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
12800 NULL, is_gimple_val, fb_rvalue);
12801 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
12803 if (error_operand_p (OMP_CLAUSE_DECL (c)))
12805 remove = true;
12806 break;
12808 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
12810 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
12811 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
12812 is_gimple_val, fb_rvalue) == GS_ERROR)
12814 remove = true;
12815 break;
12818 if (code == OMP_TASK)
12819 ctx->has_depend = true;
12820 break;
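 /* For illustration, the dependence shape this sets up (a user-level
    sketch; produce and consume are hypothetical functions):

       int x;
       #pragma omp task depend (out: x)
         x = produce ();
       #pragma omp task depend (in: x)
         consume (x);

    The runtime is handed the address of each list item, hence the
    build_fold_addr_expr call above. */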
12822 case OMP_CLAUSE_TO:
12823 case OMP_CLAUSE_FROM:
12824 case OMP_CLAUSE__CACHE_:
12825 decl = OMP_CLAUSE_DECL (c);
12826 if (error_operand_p (decl))
12828 remove = true;
12829 break;
12831 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12832 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
12833 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
12834 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
12835 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
12837 remove = true;
12838 break;
12840 if (!DECL_P (decl))
12842 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
12843 NULL, is_gimple_lvalue, fb_lvalue)
12844 == GS_ERROR)
12846 remove = true;
12847 break;
12849 break;
12851 goto do_notice;
12853 case OMP_CLAUSE_USE_DEVICE_PTR:
12854 case OMP_CLAUSE_USE_DEVICE_ADDR:
12855 flags = GOVD_EXPLICIT;
12856 goto do_add;
12858 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12859 decl = OMP_CLAUSE_DECL (c);
12860 while (TREE_CODE (decl) == INDIRECT_REF
12861 || TREE_CODE (decl) == ARRAY_REF)
12862 decl = TREE_OPERAND (decl, 0);
12863 flags = GOVD_EXPLICIT;
12864 goto do_add_decl;
12866 case OMP_CLAUSE_IS_DEVICE_PTR:
12867 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
12868 goto do_add;
12870 do_add:
12871 decl = OMP_CLAUSE_DECL (c);
12872 do_add_decl:
12873 if (error_operand_p (decl))
12875 remove = true;
12876 break;
12878 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
12880 tree t = omp_member_access_dummy_var (decl);
12881 if (t)
12883 tree v = DECL_VALUE_EXPR (decl);
12884 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
12885 if (outer_ctx)
12886 omp_notice_variable (outer_ctx, t, true);
12889 if (code == OACC_DATA
12890 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12891 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12892 flags |= GOVD_MAP_0LEN_ARRAY;
12893 omp_add_variable (ctx, decl, flags);
12894 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
12895 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
12896 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
12897 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
12899 struct gimplify_omp_ctx *pctx
12900 = code == OMP_TARGET ? outer_ctx : ctx;
12901 if (pctx)
12902 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
12903 GOVD_LOCAL | GOVD_SEEN);
12904 if (pctx
12905 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
12906 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
12907 find_decl_expr,
12908 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
12909 NULL) == NULL_TREE)
12910 omp_add_variable (pctx,
12911 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
12912 GOVD_LOCAL | GOVD_SEEN);
12913 gimplify_omp_ctxp = pctx;
12914 push_gimplify_context ();
12916 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
12917 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
12919 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
12920 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
12921 pop_gimplify_context
12922 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
12923 push_gimplify_context ();
12924 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
12925 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
12926 pop_gimplify_context
12927 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
12928 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
12929 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
12931 gimplify_omp_ctxp = outer_ctx;
12933 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12934 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
12936 gimplify_omp_ctxp = ctx;
12937 push_gimplify_context ();
12938 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
12940 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
12941 NULL, NULL);
12942 TREE_SIDE_EFFECTS (bind) = 1;
12943 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
12944 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
12946 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
12947 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
12948 pop_gimplify_context
12949 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
12950 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
12952 gimplify_omp_ctxp = outer_ctx;
12954 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12955 && OMP_CLAUSE_LINEAR_STMT (c))
12957 gimplify_omp_ctxp = ctx;
12958 push_gimplify_context ();
12959 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
12961 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
12962 NULL, NULL);
12963 TREE_SIDE_EFFECTS (bind) = 1;
12964 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
12965 OMP_CLAUSE_LINEAR_STMT (c) = bind;
12967 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
12968 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
12969 pop_gimplify_context
12970 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
12971 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
12973 gimplify_omp_ctxp = outer_ctx;
12975 if (notice_outer)
12976 goto do_notice;
12977 break;
12979 case OMP_CLAUSE_COPYIN:
12980 case OMP_CLAUSE_COPYPRIVATE:
12981 decl = OMP_CLAUSE_DECL (c);
12982 if (error_operand_p (decl))
12984 remove = true;
12985 break;
12987 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
12988 && !remove
12989 && !omp_check_private (ctx, decl, true))
12991 remove = true;
12992 if (is_global_var (decl))
12994 if (DECL_THREAD_LOCAL_P (decl))
12995 remove = false;
12996 else if (DECL_HAS_VALUE_EXPR_P (decl))
12998 tree value = get_base_address (DECL_VALUE_EXPR (decl));
13000 if (value
13001 && DECL_P (value)
13002 && DECL_THREAD_LOCAL_P (value))
13003 remove = false;
13006 if (remove)
13007 error_at (OMP_CLAUSE_LOCATION (c),
13008 "copyprivate variable %qE is not threadprivate"
13009 " or private in outer context", DECL_NAME (decl));
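 /* For illustration, a valid copyprivate use (a user-level sketch;
    read_input is a hypothetical function):

       int x;
       #pragma omp threadprivate (x)
       ...
       #pragma omp parallel
       #pragma omp single copyprivate (x)
         x = read_input ();

    Had x been neither threadprivate nor private in the outer context,
    the error above would trigger. */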
13011 do_notice:
13012 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
13013 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
13014 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
13015 && outer_ctx
13016 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
13017 || (region_type == ORT_WORKSHARE
13018 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
13019 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
13020 || code == OMP_LOOP)))
13021 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
13022 || (code == OMP_LOOP
13023 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
13024 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
13025 == ORT_COMBINED_TEAMS))))
13027 splay_tree_node on
13028 = splay_tree_lookup (outer_ctx->variables,
13029 (splay_tree_key)decl);
13030 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
13032 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
13033 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
13034 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
13035 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
13036 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
13037 == POINTER_TYPE))))
13038 omp_firstprivatize_variable (outer_ctx, decl);
13039 else
13041 omp_add_variable (outer_ctx, decl,
13042 GOVD_SEEN | GOVD_SHARED);
13043 if (outer_ctx->outer_context)
13044 omp_notice_variable (outer_ctx->outer_context, decl,
13045 true);
13049 if (outer_ctx)
13050 omp_notice_variable (outer_ctx, decl, true);
13051 if (check_non_private
13052 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
13053 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
13054 || decl == OMP_CLAUSE_DECL (c)
13055 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
13056 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13057 == ADDR_EXPR
13058 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13059 == POINTER_PLUS_EXPR
13060 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
13061 (OMP_CLAUSE_DECL (c), 0), 0))
13062 == ADDR_EXPR)))))
13063 && omp_check_private (ctx, decl, false))
13065 error ("%s variable %qE is private in outer context",
13066 check_non_private, DECL_NAME (decl));
13067 remove = true;
13069 break;
13071 case OMP_CLAUSE_DETACH:
13072 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
13073 goto do_add;
13075 case OMP_CLAUSE_IF:
13076 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
13077 && OMP_CLAUSE_IF_MODIFIER (c) != code)
13079 const char *p[2];
13080 for (int i = 0; i < 2; i++)
13081 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
13083 case VOID_CST: p[i] = "cancel"; break;
13084 case OMP_PARALLEL: p[i] = "parallel"; break;
13085 case OMP_SIMD: p[i] = "simd"; break;
13086 case OMP_TASK: p[i] = "task"; break;
13087 case OMP_TASKLOOP: p[i] = "taskloop"; break;
13088 case OMP_TARGET_DATA: p[i] = "target data"; break;
13089 case OMP_TARGET: p[i] = "target"; break;
13090 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
13091 case OMP_TARGET_ENTER_DATA:
13092 p[i] = "target enter data"; break;
13093 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
13094 default: gcc_unreachable ();
13096 error_at (OMP_CLAUSE_LOCATION (c),
13097 "expected %qs %<if%> clause modifier rather than %qs",
13098 p[0], p[1]);
13099 remove = true;
13101 /* Fall through. */
13103 case OMP_CLAUSE_SELF:
13104 case OMP_CLAUSE_FINAL:
13105 OMP_CLAUSE_OPERAND (c, 0)
13106 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
13107 /* Fall through. */
13109 case OMP_CLAUSE_NUM_TEAMS:
13110 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
13111 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
13112 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
13114 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
13116 remove = true;
13117 break;
13119 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
13120 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
13121 pre_p, NULL, true);
13123 /* Fall through. */
13125 case OMP_CLAUSE_SCHEDULE:
13126 case OMP_CLAUSE_NUM_THREADS:
13127 case OMP_CLAUSE_THREAD_LIMIT:
13128 case OMP_CLAUSE_DIST_SCHEDULE:
13129 case OMP_CLAUSE_DEVICE:
13130 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
13131 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
13133 if (code != OMP_TARGET)
13135 error_at (OMP_CLAUSE_LOCATION (c),
13136 "%<device%> clause with %<ancestor%> is only "
13137 "allowed on %<target%> construct");
13138 remove = true;
13139 break;
13142 tree clauses = *orig_list_p;
13143 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
13144 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
13145 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
13146 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
13147 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
13148 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
13151 error_at (OMP_CLAUSE_LOCATION (c),
13152 "with %<ancestor%>, only the %<device%>, "
13153 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
13154 "and %<map%> clauses may appear on the "
13155 "construct");
13156 remove = true;
13157 break;
13160 /* Fall through. */
13162 case OMP_CLAUSE_PRIORITY:
13163 case OMP_CLAUSE_GRAINSIZE:
13164 case OMP_CLAUSE_NUM_TASKS:
13165 case OMP_CLAUSE_FILTER:
13166 case OMP_CLAUSE_HINT:
13167 case OMP_CLAUSE_ASYNC:
13168 case OMP_CLAUSE_WAIT:
13169 case OMP_CLAUSE_NUM_GANGS:
13170 case OMP_CLAUSE_NUM_WORKERS:
13171 case OMP_CLAUSE_VECTOR_LENGTH:
13172 case OMP_CLAUSE_WORKER:
13173 case OMP_CLAUSE_VECTOR:
13174 if (OMP_CLAUSE_OPERAND (c, 0)
13175 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
13177 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
13179 remove = true;
13180 break;
13182 /* All these clauses care about the value, not a particular decl,
13183 so try to force it into an SSA_NAME or a fresh temporary. */
13184 OMP_CLAUSE_OPERAND (c, 0)
13185 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
13186 pre_p, NULL, true);
13188 break;
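 /* For illustration (a user-level sketch; nthreads_for is a
    hypothetical function):

       #pragma omp parallel num_threads (nthreads_for (depth))
         ...

    The call is evaluated once into a fresh temporary before the
    region, which is what get_initialized_tmp_var arranges above. */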
13190 case OMP_CLAUSE_GANG:
13191 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
13192 is_gimple_val, fb_rvalue) == GS_ERROR)
13193 remove = true;
13194 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
13195 is_gimple_val, fb_rvalue) == GS_ERROR)
13196 remove = true;
13197 break;
13199 case OMP_CLAUSE_NOWAIT:
13200 nowait = 1;
13201 break;
13203 case OMP_CLAUSE_ORDERED:
13204 case OMP_CLAUSE_UNTIED:
13205 case OMP_CLAUSE_COLLAPSE:
13206 case OMP_CLAUSE_TILE:
13207 case OMP_CLAUSE_AUTO:
13208 case OMP_CLAUSE_SEQ:
13209 case OMP_CLAUSE_INDEPENDENT:
13210 case OMP_CLAUSE_MERGEABLE:
13211 case OMP_CLAUSE_PROC_BIND:
13212 case OMP_CLAUSE_SAFELEN:
13213 case OMP_CLAUSE_SIMDLEN:
13214 case OMP_CLAUSE_NOGROUP:
13215 case OMP_CLAUSE_THREADS:
13216 case OMP_CLAUSE_SIMD:
13217 case OMP_CLAUSE_BIND:
13218 case OMP_CLAUSE_IF_PRESENT:
13219 case OMP_CLAUSE_FINALIZE:
13220 break;
13222 case OMP_CLAUSE_ORDER:
13223 ctx->order_concurrent = true;
13224 break;
13226 case OMP_CLAUSE_DEFAULTMAP:
13227 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
13228 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
13230 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
13231 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
13232 gdmkmin = GDMK_SCALAR;
13233 gdmkmax = GDMK_POINTER;
13234 break;
13235 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
13236 gdmkmin = GDMK_SCALAR;
13237 gdmkmax = GDMK_SCALAR_TARGET;
13238 break;
13239 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
13240 gdmkmin = gdmkmax = GDMK_AGGREGATE;
13241 break;
13242 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
13243 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
13244 break;
13245 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
13246 gdmkmin = gdmkmax = GDMK_POINTER;
13247 break;
13248 default:
13249 gcc_unreachable ();
13251 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
13252 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
13254 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
13255 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
13256 break;
13257 case OMP_CLAUSE_DEFAULTMAP_TO:
13258 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
13259 break;
13260 case OMP_CLAUSE_DEFAULTMAP_FROM:
13261 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
13262 break;
13263 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
13264 ctx->defaultmap[gdmk] = GOVD_MAP;
13265 break;
13266 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
13267 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
13268 break;
13269 case OMP_CLAUSE_DEFAULTMAP_NONE:
13270 ctx->defaultmap[gdmk] = 0;
13271 break;
13272 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
13273 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
13274 break;
13275 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
13276 switch (gdmk)
13278 case GDMK_SCALAR:
13279 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
13280 break;
13281 case GDMK_SCALAR_TARGET:
13282 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
13283 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
13284 break;
13285 case GDMK_AGGREGATE:
13286 case GDMK_ALLOCATABLE:
13287 ctx->defaultmap[gdmk] = GOVD_MAP;
13288 break;
13289 case GDMK_POINTER:
13290 ctx->defaultmap[gdmk] = GOVD_MAP;
13291 if (!lang_GNU_Fortran ())
13292 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
13293 break;
13294 default:
13295 gcc_unreachable ();
13297 break;
13298 default:
13299 gcc_unreachable ();
13301 break;
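 /* For illustration, how the settings above play out (a user-level
    sketch):

       int s = 0;
       int a[8];
       #pragma omp target defaultmap (tofrom: scalar)
         s += a[0];

    By default the scalar s would be firstprivate (GDMK_SCALAR); the
    clause switches ctx->defaultmap[GDMK_SCALAR] to GOVD_MAP, so s is
    mapped tofrom like the aggregate a. */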
13303 case OMP_CLAUSE_ALIGNED:
13304 decl = OMP_CLAUSE_DECL (c);
13305 if (error_operand_p (decl))
13307 remove = true;
13308 break;
13310 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
13311 is_gimple_val, fb_rvalue) == GS_ERROR)
13313 remove = true;
13314 break;
13316 if (!is_global_var (decl)
13317 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
13318 omp_add_variable (ctx, decl, GOVD_ALIGNED);
13319 break;
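 /* For illustration (a user-level sketch):

       void scale (float *p, int n)
       {
         #pragma omp simd aligned (p : 32)
         for (int i = 0; i < n; i++)
           p[i] *= 2.0f;
       }

    p is a local pointer, so GOVD_ALIGNED is recorded for it here; the
    alignment assumption itself is materialized later. */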
13321 case OMP_CLAUSE_NONTEMPORAL:
13322 decl = OMP_CLAUSE_DECL (c);
13323 if (error_operand_p (decl))
13325 remove = true;
13326 break;
13328 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
13329 break;
13331 case OMP_CLAUSE_ALLOCATE:
13332 decl = OMP_CLAUSE_DECL (c);
13333 if (error_operand_p (decl))
13335 remove = true;
13336 break;
13338 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
13339 is_gimple_val, fb_rvalue) == GS_ERROR)
13341 remove = true;
13342 break;
13344 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
13345 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
13346 == INTEGER_CST))
13348 else if (code == OMP_TASKLOOP
13349 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
13350 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13351 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13352 pre_p, NULL, false);
13353 break;
13355 case OMP_CLAUSE_DEFAULT:
13356 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
13357 break;
13359 case OMP_CLAUSE_INCLUSIVE:
13360 case OMP_CLAUSE_EXCLUSIVE:
13361 decl = OMP_CLAUSE_DECL (c);
13363 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
13364 (splay_tree_key) decl);
13365 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
13367 error_at (OMP_CLAUSE_LOCATION (c),
13368 "%qD specified in %qs clause but not in %<inscan%> "
13369 "%<reduction%> clause on the containing construct",
13370 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
13371 remove = true;
13373 else
13375 n->value |= GOVD_REDUCTION_INSCAN;
13376 if (outer_ctx->region_type == ORT_SIMD
13377 && outer_ctx->outer_context
13378 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
13380 n = splay_tree_lookup (outer_ctx->outer_context->variables,
13381 (splay_tree_key) decl);
13382 if (n && (n->value & GOVD_REDUCTION) != 0)
13383 n->value |= GOVD_REDUCTION_INSCAN;
13387 break;
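 /* For illustration, the inscan shape these clauses require (a
    user-level sketch):

       int sum = 0;
       #pragma omp simd reduction (inscan, +: sum)
       for (int i = 0; i < n; i++)
         {
           sum += a[i];
           #pragma omp scan inclusive (sum)
           b[i] = sum;
         }

    sum must appear in the enclosing inscan reduction clause, or the
    error above is emitted. */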
13389 case OMP_CLAUSE_NOHOST:
13390 default:
13391 gcc_unreachable ();
13394 if (code == OACC_DATA
13395 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13396 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13397 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
13398 remove = true;
13399 if (remove)
13400 *list_p = OMP_CLAUSE_CHAIN (c);
13401 else
13402 list_p = &OMP_CLAUSE_CHAIN (c);
13405 if (groups)
13407 delete grpmap;
13408 delete groups;
13411 ctx->clauses = *orig_list_p;
13412 gimplify_omp_ctxp = ctx;
13415 /* Return true if DECL is a candidate for the shared-to-firstprivate
13416 optimization. We only consider non-addressable scalars that are
13417 not too large and are not references. */
13419 static bool
13420 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
13422 if (TREE_ADDRESSABLE (decl))
13423 return false;
13424 tree type = TREE_TYPE (decl);
13425 if (!is_gimple_reg_type (type)
13426 || TREE_CODE (type) == REFERENCE_TYPE
13427 || TREE_ADDRESSABLE (type))
13428 return false;
13429 /* Don't optimize overly large decls, as each thread/task will
13430 have its own copy. */
13431 HOST_WIDE_INT len = int_size_in_bytes (type);
13432 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
13433 return false;
13434 if (omp_privatize_by_reference (decl))
13435 return false;
13436 return true;
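 /* For illustration, the candidate shape (a user-level sketch; use is
    a hypothetical function):

       int x = 42;
       #pragma omp task shared (x)
         use (x);

    A small non-addressable scalar like x that is never written inside
    the region can be copied into the task instead of shared. */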
13439 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
13440 For a decl satisfying omp_shared_to_firstprivate_optimizable_decl_p,
13441 mark it as GOVD_WRITTEN in outer contexts. */
13443 static void
13444 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
13446 for (; ctx; ctx = ctx->outer_context)
13448 splay_tree_node n = splay_tree_lookup (ctx->variables,
13449 (splay_tree_key) decl);
13450 if (n == NULL)
13451 continue;
13452 else if (n->value & GOVD_SHARED)
13454 n->value |= GOVD_WRITTEN;
13455 return;
13457 else if (n->value & GOVD_DATA_SHARE_CLASS)
13458 return;
13462 /* Helper callback for walk_gimple_seq to discover possible stores
13463 to omp_shared_to_firstprivate_optimizable_decl_p decls and mark
13464 them GOVD_WRITTEN when they are GOVD_SHARED in some outer
13465 context. */
13467 static tree
13468 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
13470 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13472 *walk_subtrees = 0;
13473 if (!wi->is_lhs)
13474 return NULL_TREE;
13476 tree op = *tp;
13479 if (handled_component_p (op))
13480 op = TREE_OPERAND (op, 0);
13481 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
13482 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
13483 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
13484 else
13485 break;
13487 while (1);
13488 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
13489 return NULL_TREE;
13491 omp_mark_stores (gimplify_omp_ctxp, op);
13492 return NULL_TREE;
13495 /* Helper callback for walk_gimple_seq to discover possible stores
13496 to omp_shared_to_firstprivate_optimizable_decl_p decls and mark
13497 them GOVD_WRITTEN when they are GOVD_SHARED in some outer
13498 context. */
13500 static tree
13501 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
13502 bool *handled_ops_p,
13503 struct walk_stmt_info *wi)
13505 gimple *stmt = gsi_stmt (*gsi_p);
13506 switch (gimple_code (stmt))
13508 /* Don't recurse on OpenMP constructs for which
13509 gimplify_adjust_omp_clauses already handled the bodies,
13510 except do walk gimple_omp_for_pre_body. */
13511 case GIMPLE_OMP_FOR:
13512 *handled_ops_p = true;
13513 if (gimple_omp_for_pre_body (stmt))
13514 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13515 omp_find_stores_stmt, omp_find_stores_op, wi);
13516 break;
13517 case GIMPLE_OMP_PARALLEL:
13518 case GIMPLE_OMP_TASK:
13519 case GIMPLE_OMP_SECTIONS:
13520 case GIMPLE_OMP_SINGLE:
13521 case GIMPLE_OMP_SCOPE:
13522 case GIMPLE_OMP_TARGET:
13523 case GIMPLE_OMP_TEAMS:
13524 case GIMPLE_OMP_CRITICAL:
13525 *handled_ops_p = true;
13526 break;
13527 default:
13528 break;
13530 return NULL_TREE;
13533 struct gimplify_adjust_omp_clauses_data
13535 tree *list_p;
13536 gimple_seq *pre_p;
13539 /* For all variables that were not actually used within the context,
13540 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
13542 static int
13543 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
13545 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
13546 gimple_seq *pre_p
13547 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
13548 tree decl = (tree) n->key;
13549 unsigned flags = n->value;
13550 enum omp_clause_code code;
13551 tree clause;
13552 bool private_debug;
13554 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
13555 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
13556 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
13557 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
13558 return 0;
13559 if ((flags & GOVD_SEEN) == 0)
13560 return 0;
13561 if (flags & GOVD_DEBUG_PRIVATE)
13563 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
13564 private_debug = true;
13566 else if (flags & GOVD_MAP)
13567 private_debug = false;
13568 else
13569 private_debug
13570 = lang_hooks.decls.omp_private_debug_clause (decl,
13571 !!(flags & GOVD_SHARED));
13572 if (private_debug)
13573 code = OMP_CLAUSE_PRIVATE;
13574 else if (flags & GOVD_MAP)
13576 code = OMP_CLAUSE_MAP;
13577 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
13578 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
13580 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
13581 return 0;
13583 if (VAR_P (decl)
13584 && DECL_IN_CONSTANT_POOL (decl)
13585 && !lookup_attribute ("omp declare target",
13586 DECL_ATTRIBUTES (decl)))
13588 tree id = get_identifier ("omp declare target");
13589 DECL_ATTRIBUTES (decl)
13590 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13591 varpool_node *node = varpool_node::get (decl);
13592 if (node)
13594 node->offloadable = 1;
13595 if (ENABLE_OFFLOADING)
13596 g->have_offload = true;
13600 else if (flags & GOVD_SHARED)
13602 if (is_global_var (decl))
13604 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
13605 while (ctx != NULL)
13607 splay_tree_node on
13608 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13609 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
13610 | GOVD_PRIVATE | GOVD_REDUCTION
13611 | GOVD_LINEAR | GOVD_MAP)) != 0)
13612 break;
13613 ctx = ctx->outer_context;
13615 if (ctx == NULL)
13616 return 0;
13618 code = OMP_CLAUSE_SHARED;
13619 /* Don't optimize shared into firstprivate for read-only vars
13620 on tasks with a depend clause; we shouldn't try to copy them
13621 until the dependencies are satisfied. */
13622 if (gimplify_omp_ctxp->has_depend)
13623 flags |= GOVD_WRITTEN;
13625 else if (flags & GOVD_PRIVATE)
13626 code = OMP_CLAUSE_PRIVATE;
13627 else if (flags & GOVD_FIRSTPRIVATE)
13629 code = OMP_CLAUSE_FIRSTPRIVATE;
13630 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
13631 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
13632 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
13634 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
13635 "%<target%> construct", decl);
13636 return 0;
13639 else if (flags & GOVD_LASTPRIVATE)
13640 code = OMP_CLAUSE_LASTPRIVATE;
13641 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
13642 return 0;
13643 else if (flags & GOVD_CONDTEMP)
13645 code = OMP_CLAUSE__CONDTEMP_;
13646 gimple_add_tmp_var (decl);
13648 else
13649 gcc_unreachable ();
13651 if (((flags & GOVD_LASTPRIVATE)
13652 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
13653 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13654 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
13656 tree chain = *list_p;
13657 clause = build_omp_clause (input_location, code);
13658 OMP_CLAUSE_DECL (clause) = decl;
13659 OMP_CLAUSE_CHAIN (clause) = chain;
13660 if (private_debug)
13661 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
13662 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
13663 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
13664 else if (code == OMP_CLAUSE_SHARED
13665 && (flags & GOVD_WRITTEN) == 0
13666 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
13667 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
13668 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
13669 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
13670 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
13672 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
13673 OMP_CLAUSE_DECL (nc) = decl;
13674 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
13675 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
13676 OMP_CLAUSE_DECL (clause)
13677 = build_fold_indirect_ref_loc (input_location, decl);
13678 OMP_CLAUSE_DECL (clause)
13679 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
13680 build_int_cst (build_pointer_type (char_type_node), 0));
13681 OMP_CLAUSE_SIZE (clause) = size_zero_node;
13682 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13683 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
13684 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
13685 tree dtype = TREE_TYPE (decl);
13686 if (TREE_CODE (dtype) == REFERENCE_TYPE)
13687 dtype = TREE_TYPE (dtype);
13688 /* FIRSTPRIVATE_POINTER doesn't work well if we have a
13689 multiply-indirected pointer. If we have a reference to a pointer to
13690 a pointer, it's possible that this should really be
13691 GOMP_MAP_FIRSTPRIVATE_REFERENCE -- but that also doesn't work at the
13692 moment, so stick with this. (See PR113279 and testcases
13693 baseptrs-{4,6}.C:ref2ptrptr_offset_decl_member_slice). */
13694 if (TREE_CODE (dtype) == POINTER_TYPE
13695 && TREE_CODE (TREE_TYPE (dtype)) == POINTER_TYPE)
13696 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13697 else
13698 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
13699 OMP_CLAUSE_CHAIN (nc) = chain;
13700 OMP_CLAUSE_CHAIN (clause) = nc;
13701 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13702 gimplify_omp_ctxp = ctx->outer_context;
13703 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
13704 pre_p, NULL, is_gimple_val, fb_rvalue);
13705 gimplify_omp_ctxp = ctx;
13707 else if (code == OMP_CLAUSE_MAP)
13709 int kind;
13710 /* Not all combinations of these GOVD_MAP flags are actually valid. */
13711 switch (flags & (GOVD_MAP_TO_ONLY
13712 | GOVD_MAP_FORCE
13713 | GOVD_MAP_FORCE_PRESENT
13714 | GOVD_MAP_ALLOC_ONLY
13715 | GOVD_MAP_FROM_ONLY))
13717 case 0:
13718 kind = GOMP_MAP_TOFROM;
13719 break;
13720 case GOVD_MAP_FORCE:
13721 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
13722 break;
13723 case GOVD_MAP_TO_ONLY:
13724 kind = GOMP_MAP_TO;
13725 break;
13726 case GOVD_MAP_FROM_ONLY:
13727 kind = GOMP_MAP_FROM;
13728 break;
13729 case GOVD_MAP_ALLOC_ONLY:
13730 kind = GOMP_MAP_ALLOC;
13731 break;
13732 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
13733 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
13734 break;
13735 case GOVD_MAP_FORCE_PRESENT:
13736 kind = GOMP_MAP_FORCE_PRESENT;
13737 break;
13738 case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
13739 kind = GOMP_MAP_FORCE_PRESENT;
13740 break;
13741 default:
13742 gcc_unreachable ();
13744 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
13745 /* Setting of the implicit flag for the runtime is currently disabled for
13746 OpenACC. */
13747 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13748 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
13749 if (DECL_SIZE (decl)
13750 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
13752 tree decl2 = DECL_VALUE_EXPR (decl);
13753 gcc_assert (INDIRECT_REF_P (decl2));
13754 decl2 = TREE_OPERAND (decl2, 0);
13755 gcc_assert (DECL_P (decl2));
13756 tree mem = build_simple_mem_ref (decl2);
13757 OMP_CLAUSE_DECL (clause) = mem;
13758 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
13759 if (gimplify_omp_ctxp->outer_context)
13761 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
13762 omp_notice_variable (ctx, decl2, true);
13763 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
13765 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
13766 OMP_CLAUSE_MAP);
13767 OMP_CLAUSE_DECL (nc) = decl;
13768 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13769 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
13770 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
13771 else
13772 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
13773 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
13774 OMP_CLAUSE_CHAIN (clause) = nc;
13776 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
13777 && omp_privatize_by_reference (decl))
13779 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
13780 OMP_CLAUSE_SIZE (clause)
13781 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
13782 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13783 gimplify_omp_ctxp = ctx->outer_context;
13784 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
13785 pre_p, NULL, is_gimple_val, fb_rvalue);
13786 gimplify_omp_ctxp = ctx;
13787 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
13788 OMP_CLAUSE_MAP);
13789 OMP_CLAUSE_DECL (nc) = decl;
13790 OMP_CLAUSE_SIZE (nc) = size_zero_node;
13791 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
13792 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
13793 OMP_CLAUSE_CHAIN (clause) = nc;
13795 else
13796 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
13798 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
13800 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
13801 OMP_CLAUSE_DECL (nc) = decl;
13802 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
13803 OMP_CLAUSE_CHAIN (nc) = chain;
13804 OMP_CLAUSE_CHAIN (clause) = nc;
13805 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13806 gimplify_omp_ctxp = ctx->outer_context;
13807 lang_hooks.decls.omp_finish_clause (nc, pre_p,
13808 (ctx->region_type & ORT_ACC) != 0);
13809 gimplify_omp_ctxp = ctx;
13811 *list_p = clause;
13812 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13813 gimplify_omp_ctxp = ctx->outer_context;
13814 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
13815 in simd. Those are only added for the local vars inside the simd
13816 body, and they don't need to be e.g. default constructible. */
13817 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
13818 lang_hooks.decls.omp_finish_clause (clause, pre_p,
13819 (ctx->region_type & ORT_ACC) != 0);
13820 if (gimplify_omp_ctxp)
13821 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
13822 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
13823 && DECL_P (OMP_CLAUSE_SIZE (clause)))
13824 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
13825 true);
13826 gimplify_omp_ctxp = ctx;
13827 return 0;
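 /* For illustration, the implicit clauses this callback synthesizes (a
    user-level sketch):

       int x = 1;
       double a[4];
       #pragma omp target
         a[0] = x;

    With no explicit clauses, x is recorded as GOVD_FIRSTPRIVATE and a
    as GOVD_MAP, and the code above materializes firstprivate(x) and
    map(tofrom:a) on the directive. */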
13830 static void
13831 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
13832 enum tree_code code)
13834 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13835 tree *orig_list_p = list_p;
13836 tree c, decl;
13837 bool has_inscan_reductions = false;
13839 if (body)
13841 struct gimplify_omp_ctx *octx;
13842 for (octx = ctx; octx; octx = octx->outer_context)
13843 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
13844 break;
13845 if (octx)
13847 struct walk_stmt_info wi;
13848 memset (&wi, 0, sizeof (wi));
13849 walk_gimple_seq (body, omp_find_stores_stmt,
13850 omp_find_stores_op, &wi);
13854 if (ctx->add_safelen1)
13856 /* If there are VLAs in the body of a simd loop, prevent
13857 vectorization. */
13858 gcc_assert (ctx->region_type == ORT_SIMD);
13859 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
13860 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
13861 OMP_CLAUSE_CHAIN (c) = *list_p;
13862 *list_p = c;
13863 list_p = &OMP_CLAUSE_CHAIN (c);
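 /* For illustration, the situation add_safelen1 guards against (a
    user-level sketch):

       #pragma omp simd
       for (int i = 0; i < n; i++)
         {
           int tmp[m];
           ...
         }

    The VLA tmp would need a private instance per SIMD lane, so the
    synthesized safelen(1) effectively disables vectorization. */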
13866 if (ctx->region_type == ORT_WORKSHARE
13867 && ctx->outer_context
13868 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
13870 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
13871 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13872 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13874 decl = OMP_CLAUSE_DECL (c);
13875 splay_tree_node n
13876 = splay_tree_lookup (ctx->outer_context->variables,
13877 (splay_tree_key) decl);
13878 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
13879 (splay_tree_key) decl));
13880 omp_add_variable (ctx, decl, n->value);
13881 tree c2 = copy_node (c);
13882 OMP_CLAUSE_CHAIN (c2) = *list_p;
13883 *list_p = c2;
13884 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
13885 continue;
13886 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13887 OMP_CLAUSE_FIRSTPRIVATE);
13888 OMP_CLAUSE_DECL (c2) = decl;
13889 OMP_CLAUSE_CHAIN (c2) = *list_p;
13890 *list_p = c2;
13894 if (code == OMP_TARGET
13895 || code == OMP_TARGET_DATA
13896 || code == OMP_TARGET_ENTER_DATA
13897 || code == OMP_TARGET_EXIT_DATA)
13899 vec<omp_mapping_group> *groups;
13900 groups = omp_gather_mapping_groups (list_p);
13901 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
13903 if (groups)
13905 grpmap = omp_index_mapping_groups (groups);
13907 omp_resolve_clause_dependencies (code, groups, grpmap);
13908 omp_build_struct_sibling_lists (code, ctx->region_type, groups,
13909 &grpmap, list_p);
13911 omp_mapping_group *outlist = NULL;
13913 delete grpmap;
13914 delete groups;
13916 /* Rebuild now that we have struct sibling lists. */
13917 groups = omp_gather_mapping_groups (list_p);
13918 grpmap = omp_index_mapping_groups (groups);
13920 bool enter_exit = (code == OMP_TARGET_ENTER_DATA
13921 || code == OMP_TARGET_EXIT_DATA);
13923 outlist = omp_tsort_mapping_groups (groups, grpmap, enter_exit);
13924 outlist = omp_segregate_mapping_groups (outlist);
13925 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
13927 delete grpmap;
13928 delete groups;
13931 else if (ctx->region_type & ORT_ACC)
13933 vec<omp_mapping_group> *groups;
13934 groups = omp_gather_mapping_groups (list_p);
13935 if (groups)
13937 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
13938 grpmap = omp_index_mapping_groups (groups);
13940 oacc_resolve_clause_dependencies (groups, grpmap);
13941 omp_build_struct_sibling_lists (code, ctx->region_type, groups,
13942 &grpmap, list_p);
13944 delete groups;
13945 delete grpmap;
13949 tree attach_list = NULL_TREE;
13950 tree *attach_tail = &attach_list;
13952 tree *grp_start_p = NULL, grp_end = NULL_TREE;
13954 while ((c = *list_p) != NULL)
13956 splay_tree_node n;
13957 bool remove = false;
13958 bool move_attach = false;
13960 if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
13961 grp_end = NULL_TREE;
13963 switch (OMP_CLAUSE_CODE (c))
13965 case OMP_CLAUSE_FIRSTPRIVATE:
13966 if ((ctx->region_type & ORT_TARGET)
13967 && (ctx->region_type & ORT_ACC) == 0
13968 && TYPE_ATOMIC (strip_array_types
13969 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
13971 error_at (OMP_CLAUSE_LOCATION (c),
13972 "%<_Atomic%> %qD in %<firstprivate%> clause on "
13973 "%<target%> construct", OMP_CLAUSE_DECL (c));
13974 remove = true;
13975 break;
13977 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13979 decl = OMP_CLAUSE_DECL (c);
13980 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13981 if ((n->value & GOVD_MAP) != 0)
13983 remove = true;
13984 break;
13986 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
13987 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
13989 /* FALLTHRU */
13990 case OMP_CLAUSE_PRIVATE:
13991 case OMP_CLAUSE_SHARED:
13992 case OMP_CLAUSE_LINEAR:
13993 decl = OMP_CLAUSE_DECL (c);
13994 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
13995 remove = !(n->value & GOVD_SEEN);
13996 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
13997 && code == OMP_PARALLEL
13998 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13999 remove = true;
14000 if (! remove)
14002 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
14003 if ((n->value & GOVD_DEBUG_PRIVATE)
14004 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
14006 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
14007 || ((n->value & GOVD_DATA_SHARE_CLASS)
14008 == GOVD_SHARED));
14009 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
14010 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
14012 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
14013 && ctx->has_depend
14014 && DECL_P (decl))
14015 n->value |= GOVD_WRITTEN;
14016 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
14017 && (n->value & GOVD_WRITTEN) == 0
14018 && DECL_P (decl)
14019 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
14020 OMP_CLAUSE_SHARED_READONLY (c) = 1;
14021 else if (DECL_P (decl)
14022 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
14023 && (n->value & GOVD_WRITTEN) != 0)
14024 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14025 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
14026 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
14027 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
14029 else
14030 n->value &= ~GOVD_EXPLICIT;
14031 break;
14033 case OMP_CLAUSE_LASTPRIVATE:
14034 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
14035 accurately reflect the presence of a FIRSTPRIVATE clause. */
14036 decl = OMP_CLAUSE_DECL (c);
14037 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14038 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
14039 = (n->value & GOVD_FIRSTPRIVATE) != 0;
14040 if (code == OMP_DISTRIBUTE
14041 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14043 remove = true;
14044 error_at (OMP_CLAUSE_LOCATION (c),
14045 "same variable used in %<firstprivate%> and "
14046 "%<lastprivate%> clauses on %<distribute%> "
14047 "construct");
14049 if (!remove
14050 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14051 && DECL_P (decl)
14052 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
14053 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
14054 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
14055 remove = true;
14056 break;
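 /* For illustration, the invalid combination rejected above (a
    user-level sketch):

       #pragma omp distribute firstprivate (x) lastprivate (x)
       for (i = 0; i < n; i++)
         ...

    OpenMP disallows the same list item in both clauses on distribute,
    hence the error. */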
14058 case OMP_CLAUSE_ALIGNED:
14059 decl = OMP_CLAUSE_DECL (c);
14060 if (!is_global_var (decl))
14062 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14063 remove = n == NULL || !(n->value & GOVD_SEEN);
14064 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
14066 struct gimplify_omp_ctx *octx;
14067 if (n != NULL
14068 && (n->value & (GOVD_DATA_SHARE_CLASS
14069 & ~GOVD_FIRSTPRIVATE)))
14070 remove = true;
14071 else
14072 for (octx = ctx->outer_context; octx;
14073 octx = octx->outer_context)
14075 n = splay_tree_lookup (octx->variables,
14076 (splay_tree_key) decl);
14077 if (n == NULL)
14078 continue;
14079 if (n->value & GOVD_LOCAL)
14080 break;
14081 /* We have to avoid assigning a shared variable
14082 to itself when trying to add
14083 __builtin_assume_aligned. */
14084 if (n->value & GOVD_SHARED)
14086 remove = true;
14087 break;
14092 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
14094 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14095 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14096 remove = true;
14098 break;
14100 case OMP_CLAUSE_HAS_DEVICE_ADDR:
14101 decl = OMP_CLAUSE_DECL (c);
14102 while (INDIRECT_REF_P (decl)
14103 || TREE_CODE (decl) == ARRAY_REF)
14104 decl = TREE_OPERAND (decl, 0);
14105 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14106 remove = n == NULL || !(n->value & GOVD_SEEN);
14107 break;
14109 case OMP_CLAUSE_IS_DEVICE_PTR:
14110 case OMP_CLAUSE_NONTEMPORAL:
14111 decl = OMP_CLAUSE_DECL (c);
14112 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14113 remove = n == NULL || !(n->value & GOVD_SEEN);
14114 break;
14116 case OMP_CLAUSE_MAP:
14117 decl = OMP_CLAUSE_DECL (c);
14118 if (!grp_end)
14120 grp_start_p = list_p;
14121 grp_end = *omp_group_last (grp_start_p);
14123 switch (OMP_CLAUSE_MAP_KIND (c))
14125 case GOMP_MAP_PRESENT_ALLOC:
14126 case GOMP_MAP_PRESENT_TO:
14127 case GOMP_MAP_PRESENT_FROM:
14128 case GOMP_MAP_PRESENT_TOFROM:
14129 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
14130 break;
14131 default:
14132 break;
14134 switch (code)
14136 case OACC_DATA:
14137 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
14138 break;
14139 /* Fallthrough. */
14140 case OACC_HOST_DATA:
14141 case OACC_ENTER_DATA:
14142 case OACC_EXIT_DATA:
14143 case OMP_TARGET_DATA:
14144 case OMP_TARGET_ENTER_DATA:
14145 case OMP_TARGET_EXIT_DATA:
14146 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
14147 || (OMP_CLAUSE_MAP_KIND (c)
14148 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
14149 /* For target {,enter ,exit }data only the array slice is
14150 mapped, but not the pointer to it. */
14151 remove = true;
14152 if (code == OMP_TARGET_EXIT_DATA
14153 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
14154 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER))
14155 remove = true;
14156 break;
14157 case OMP_TARGET:
14158 break;
14159 default:
14160 break;
14162 if (remove)
14163 break;
14164 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14166 /* Sanity check: attach/detach map kinds use the size as a bias,
14167 and it's never right to use the decl size for such
14168 mappings. */
14169 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
14170 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
14171 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH
14172 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
14173 && (OMP_CLAUSE_MAP_KIND (c)
14174 != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
14175 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
14176 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
14178 gimplify_omp_ctxp = ctx->outer_context;
14179 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, NULL,
14180 is_gimple_val, fb_rvalue) == GS_ERROR)
14182 gimplify_omp_ctxp = ctx;
14183 remove = true;
14184 break;
14186 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
14187 || (OMP_CLAUSE_MAP_KIND (c)
14188 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
14189 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14190 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
14192 OMP_CLAUSE_SIZE (c)
14193 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
14194 false);
14195 if ((ctx->region_type & ORT_TARGET) != 0)
14196 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
14197 GOVD_FIRSTPRIVATE | GOVD_SEEN);
14199 gimplify_omp_ctxp = ctx;
14200 /* Data clauses associated with reductions must be
14201 compatible with present_or_copy. Warn and adjust the clause
14202 if that is not the case. */
14203 if (ctx->region_type == ORT_ACC_PARALLEL
14204 || ctx->region_type == ORT_ACC_SERIAL)
14206 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
14207 n = NULL;
14209 if (DECL_P (t))
14210 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
14212 if (n && (n->value & GOVD_REDUCTION))
14214 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
14216 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
14217 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
14218 && kind != GOMP_MAP_FORCE_PRESENT
14219 && kind != GOMP_MAP_POINTER)
14221 warning_at (OMP_CLAUSE_LOCATION (c), 0,
14222 "incompatible data clause with reduction "
14223 "on %qE; promoting to %<present_or_copy%>",
14224 DECL_NAME (t));
14225 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
14229 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
14230 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
14231 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
14233 remove = true;
14234 break;
14236 /* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
14237 a variable captured in a lambda closure), look through that now
14238 before the DECL_P check below. (A code other than COMPONENT_REF,
14239 i.e. INDIRECT_REF, will be a VLA/variable-length array
14240 section. A global var may be a variable in a common block. We
14241 don't want to do this here for either of those.) */
14242 if ((ctx->region_type & ORT_ACC) == 0
14243 && DECL_P (decl)
14244 && !is_global_var (decl)
14245 && DECL_HAS_VALUE_EXPR_P (decl)
14246 && TREE_CODE (DECL_VALUE_EXPR (decl)) == COMPONENT_REF)
14247 decl = OMP_CLAUSE_DECL (c) = DECL_VALUE_EXPR (decl);
14248 if (TREE_CODE (decl) == TARGET_EXPR)
14250 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
14251 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
14252 remove = true;
14254 else if (!DECL_P (decl))
14256 if ((ctx->region_type & ORT_TARGET) != 0
14257 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
14259 if (INDIRECT_REF_P (decl)
14260 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14261 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14262 == REFERENCE_TYPE))
14263 decl = TREE_OPERAND (decl, 0);
14264 if (TREE_CODE (decl) == COMPONENT_REF)
14266 while (TREE_CODE (decl) == COMPONENT_REF)
14267 decl = TREE_OPERAND (decl, 0);
14268 if (DECL_P (decl))
14270 n = splay_tree_lookup (ctx->variables,
14271 (splay_tree_key) decl);
14272 if (!(n->value & GOVD_SEEN))
14273 remove = true;
14278 tree d = decl, *pd;
14279 if (TREE_CODE (d) == ARRAY_REF)
14281 while (TREE_CODE (d) == ARRAY_REF)
14282 d = TREE_OPERAND (d, 0);
14283 if (TREE_CODE (d) == COMPONENT_REF
14284 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
14285 decl = d;
14287 pd = &OMP_CLAUSE_DECL (c);
14288 if (d == decl
14289 && TREE_CODE (decl) == INDIRECT_REF
14290 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14291 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14292 == REFERENCE_TYPE)
14293 && (OMP_CLAUSE_MAP_KIND (c)
14294 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
14296 pd = &TREE_OPERAND (decl, 0);
14297 decl = TREE_OPERAND (decl, 0);
14300 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14301 switch (code)
14303 case OACC_ENTER_DATA:
14304 case OACC_EXIT_DATA:
14305 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
14306 == ARRAY_TYPE)
14307 remove = true;
14308 else if (code == OACC_ENTER_DATA)
14309 goto change_to_attach;
14310 /* Fallthrough. */
14311 case OMP_TARGET_EXIT_DATA:
14312 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DETACH);
14313 break;
14314 case OACC_UPDATE:
14315 /* An "attach/detach" operation on an update directive
14316 should behave as a GOMP_MAP_ALWAYS_POINTER. Note that
14317 both GOMP_MAP_ATTACH_DETACH and GOMP_MAP_ALWAYS_POINTER
14318 kinds depend on the previous mapping (for non-TARGET
14319 regions). */
14320 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
14321 break;
14322 default:
14323 change_to_attach:
14324 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ATTACH);
14325 if ((ctx->region_type & ORT_TARGET) != 0)
14326 move_attach = true;
14328 else if ((ctx->region_type & ORT_TARGET) != 0
14329 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14330 || (OMP_CLAUSE_MAP_KIND (c)
14331 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
14332 move_attach = true;
14334 /* If we have e.g. map(struct: *var), don't gimplify the
14335 argument since omp-low.cc wants to see the decl itself. */
14336 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
14337 break;
14339 /* We've already partly gimplified this in
14340 gimplify_scan_omp_clauses. Don't do any more. */
14341 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
14342 break;
14344 gimplify_omp_ctxp = ctx->outer_context;
14345 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
14346 fb_lvalue) == GS_ERROR)
14347 remove = true;
14348 gimplify_omp_ctxp = ctx;
14349 break;
14352 if ((code == OMP_TARGET
14353 || code == OMP_TARGET_DATA
14354 || code == OMP_TARGET_ENTER_DATA
14355 || code == OMP_TARGET_EXIT_DATA)
14356 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14358 bool firstprivatize = false;
14360 for (struct gimplify_omp_ctx *octx = ctx->outer_context; octx;
14361 octx = octx->outer_context)
14363 splay_tree_node n
14364 = splay_tree_lookup (octx->variables,
14365 (splay_tree_key) OMP_CLAUSE_DECL (c));
14366 /* If this is contained in an outer OpenMP region as a
14367 firstprivate value, remove the attach/detach. */
14368 if (n && (n->value & GOVD_FIRSTPRIVATE))
14370 firstprivatize = true;
14371 break;
14375 enum gomp_map_kind map_kind;
14376 if (firstprivatize)
14377 map_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
14378 else if (code == OMP_TARGET_EXIT_DATA)
14379 map_kind = GOMP_MAP_DETACH;
14380 else
14381 map_kind = GOMP_MAP_ATTACH;
14382 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
14384 else if ((ctx->region_type & ORT_ACC) != 0
14385 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14387 enum gomp_map_kind map_kind = (code == OACC_EXIT_DATA
14388 ? GOMP_MAP_DETACH
14389 : GOMP_MAP_ATTACH);
14390 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
14393 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14394 if ((ctx->region_type & ORT_TARGET) != 0
14395 && !(n->value & GOVD_SEEN)
14396 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
14397 && (!is_global_var (decl)
14398 || !lookup_attribute ("omp declare target link",
14399 DECL_ATTRIBUTES (decl))))
14401 remove = true;
14402 /* For struct element mappings, if the struct is never referenced
14403 in the target block and none of the mappings has an always
14404 modifier, remove all the struct element mappings, which
14405 immediately follow the GOMP_MAP_STRUCT map clause. */
14406 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
14407 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
14409 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
14410 while (cnt--)
14411 OMP_CLAUSE_CHAIN (c)
14412 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
14415 else if (DECL_SIZE (decl)
14416 && !poly_int_tree_p (DECL_SIZE (decl))
14417 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
14418 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
14419 && (OMP_CLAUSE_MAP_KIND (c)
14420 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
14422 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
14423 for these, TREE_CODE (DECL_SIZE (decl)) will always be
14424 INTEGER_CST. */
14425 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
14427 tree decl2 = DECL_VALUE_EXPR (decl);
14428 gcc_assert (INDIRECT_REF_P (decl2));
14429 decl2 = TREE_OPERAND (decl2, 0);
14430 gcc_assert (DECL_P (decl2));
14431 tree mem = build_simple_mem_ref (decl2);
14432 OMP_CLAUSE_DECL (c) = mem;
14433 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
14434 if (ctx->outer_context)
14436 omp_notice_variable (ctx->outer_context, decl2, true);
14437 omp_notice_variable (ctx->outer_context,
14438 OMP_CLAUSE_SIZE (c), true);
14440 if (((ctx->region_type & ORT_TARGET) != 0
14441 || !ctx->target_firstprivatize_array_bases)
14442 && ((n->value & GOVD_SEEN) == 0
14443 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
14445 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14446 OMP_CLAUSE_MAP);
14447 OMP_CLAUSE_DECL (nc) = decl;
14448 OMP_CLAUSE_SIZE (nc) = size_zero_node;
14449 if (ctx->target_firstprivatize_array_bases)
14450 OMP_CLAUSE_SET_MAP_KIND (nc,
14451 GOMP_MAP_FIRSTPRIVATE_POINTER);
14452 else
14453 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
14454 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
14455 OMP_CLAUSE_CHAIN (c) = nc;
14456 c = nc;
14459 else
14461 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14462 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
14463 gcc_assert ((n->value & GOVD_SEEN) == 0
14464 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
14465 == 0));
14468 /* If we have a target region, we can push all the attaches to the
14469 end of the list (we may have standalone "attach" operations
14470 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
14471 the attachment point AND the pointed-to block have been mapped).
14472 If we have something else, e.g. "enter data", we need to keep
14473 "attach" nodes together with the previous node they attach to so
14474 that separate "exit data" operations work properly (see
14475 libgomp/target.c). */
14476 if ((ctx->region_type & ORT_TARGET) != 0
14477 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14478 || (OMP_CLAUSE_MAP_KIND (c)
14479 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
14480 move_attach = true;
14482 break;
14484 case OMP_CLAUSE_TO:
14485 case OMP_CLAUSE_FROM:
14486 case OMP_CLAUSE__CACHE_:
14487 decl = OMP_CLAUSE_DECL (c);
14488 if (!DECL_P (decl))
14489 break;
14490 if (DECL_SIZE (decl)
14491 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
14493 tree decl2 = DECL_VALUE_EXPR (decl);
14494 gcc_assert (INDIRECT_REF_P (decl2));
14495 decl2 = TREE_OPERAND (decl2, 0);
14496 gcc_assert (DECL_P (decl2));
14497 tree mem = build_simple_mem_ref (decl2);
14498 OMP_CLAUSE_DECL (c) = mem;
14499 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
14500 if (ctx->outer_context)
14502 omp_notice_variable (ctx->outer_context, decl2, true);
14503 omp_notice_variable (ctx->outer_context,
14504 OMP_CLAUSE_SIZE (c), true);
14507 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14508 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
14509 break;
14511 case OMP_CLAUSE_REDUCTION:
14512 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
14514 decl = OMP_CLAUSE_DECL (c);
14515 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14516 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
14518 remove = true;
14519 error_at (OMP_CLAUSE_LOCATION (c),
14520 "%qD specified in %<inscan%> %<reduction%> clause "
14521 "but not in %<scan%> directive clause", decl);
14522 break;
14524 has_inscan_reductions = true;
14526 /* FALLTHRU */
14527 case OMP_CLAUSE_IN_REDUCTION:
14528 case OMP_CLAUSE_TASK_REDUCTION:
14529 decl = OMP_CLAUSE_DECL (c);
14530 /* OpenACC reductions need a present_or_copy data clause.
14531 Add one if necessary.  Emit an error when the reduction is private. */
14532 if (ctx->region_type == ORT_ACC_PARALLEL
14533 || ctx->region_type == ORT_ACC_SERIAL)
14535 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14536 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
14538 remove = true;
14539 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
14540 "reduction on %qE", DECL_NAME (decl));
14542 else if ((n->value & GOVD_MAP) == 0)
14544 tree next = OMP_CLAUSE_CHAIN (c);
14545 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
14546 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
14547 OMP_CLAUSE_DECL (nc) = decl;
14548 OMP_CLAUSE_CHAIN (c) = nc;
14549 lang_hooks.decls.omp_finish_clause (nc, pre_p,
14550 (ctx->region_type
14551 & ORT_ACC) != 0);
14552 while (1)
14554 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
14555 if (OMP_CLAUSE_CHAIN (nc) == NULL)
14556 break;
14557 nc = OMP_CLAUSE_CHAIN (nc);
14559 OMP_CLAUSE_CHAIN (nc) = next;
14560 n->value |= GOVD_MAP;
14563 if (DECL_P (decl)
14564 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
14565 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
14566 break;
14568 case OMP_CLAUSE_ALLOCATE:
14569 decl = OMP_CLAUSE_DECL (c);
14570 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
14571 if (n != NULL && !(n->value & GOVD_SEEN))
14573 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
14574 != 0
14575 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
14576 remove = true;
14578 if (!remove
14579 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14580 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
14581 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
14582 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
14583 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
14585 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14586 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
14587 if (n == NULL)
14589 enum omp_clause_default_kind default_kind
14590 = ctx->default_kind;
14591 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
14592 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
14593 true);
14594 ctx->default_kind = default_kind;
14596 else
14597 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
14598 true);
14600 break;
14602 case OMP_CLAUSE_COPYIN:
14603 case OMP_CLAUSE_COPYPRIVATE:
14604 case OMP_CLAUSE_IF:
14605 case OMP_CLAUSE_SELF:
14606 case OMP_CLAUSE_NUM_THREADS:
14607 case OMP_CLAUSE_NUM_TEAMS:
14608 case OMP_CLAUSE_THREAD_LIMIT:
14609 case OMP_CLAUSE_DIST_SCHEDULE:
14610 case OMP_CLAUSE_DEVICE:
14611 case OMP_CLAUSE_SCHEDULE:
14612 case OMP_CLAUSE_NOWAIT:
14613 case OMP_CLAUSE_ORDERED:
14614 case OMP_CLAUSE_DEFAULT:
14615 case OMP_CLAUSE_UNTIED:
14616 case OMP_CLAUSE_COLLAPSE:
14617 case OMP_CLAUSE_FINAL:
14618 case OMP_CLAUSE_MERGEABLE:
14619 case OMP_CLAUSE_PROC_BIND:
14620 case OMP_CLAUSE_SAFELEN:
14621 case OMP_CLAUSE_SIMDLEN:
14622 case OMP_CLAUSE_DEPEND:
14623 case OMP_CLAUSE_DOACROSS:
14624 case OMP_CLAUSE_PRIORITY:
14625 case OMP_CLAUSE_GRAINSIZE:
14626 case OMP_CLAUSE_NUM_TASKS:
14627 case OMP_CLAUSE_NOGROUP:
14628 case OMP_CLAUSE_THREADS:
14629 case OMP_CLAUSE_SIMD:
14630 case OMP_CLAUSE_FILTER:
14631 case OMP_CLAUSE_HINT:
14632 case OMP_CLAUSE_DEFAULTMAP:
14633 case OMP_CLAUSE_ORDER:
14634 case OMP_CLAUSE_BIND:
14635 case OMP_CLAUSE_DETACH:
14636 case OMP_CLAUSE_USE_DEVICE_PTR:
14637 case OMP_CLAUSE_USE_DEVICE_ADDR:
14638 case OMP_CLAUSE_ASYNC:
14639 case OMP_CLAUSE_WAIT:
14640 case OMP_CLAUSE_INDEPENDENT:
14641 case OMP_CLAUSE_NUM_GANGS:
14642 case OMP_CLAUSE_NUM_WORKERS:
14643 case OMP_CLAUSE_VECTOR_LENGTH:
14644 case OMP_CLAUSE_GANG:
14645 case OMP_CLAUSE_WORKER:
14646 case OMP_CLAUSE_VECTOR:
14647 case OMP_CLAUSE_AUTO:
14648 case OMP_CLAUSE_SEQ:
14649 case OMP_CLAUSE_TILE:
14650 case OMP_CLAUSE_IF_PRESENT:
14651 case OMP_CLAUSE_FINALIZE:
14652 case OMP_CLAUSE_INCLUSIVE:
14653 case OMP_CLAUSE_EXCLUSIVE:
14654 break;
14656 case OMP_CLAUSE_NOHOST:
14657 default:
14658 gcc_unreachable ();
14661 if (remove)
14662 *list_p = OMP_CLAUSE_CHAIN (c);
14663 else if (move_attach)
14665 /* Remove attach node from here, separate out into its own list. */
14666 *attach_tail = c;
14667 *list_p = OMP_CLAUSE_CHAIN (c);
14668 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
14669 attach_tail = &OMP_CLAUSE_CHAIN (c);
14671 else
14672 list_p = &OMP_CLAUSE_CHAIN (c);
14675 /* Splice attach nodes at the end of the list. */
14676 if (attach_list)
14678 *list_p = attach_list;
14679 list_p = attach_tail;
14682 /* Add in any implicit data sharing. */
14683 struct gimplify_adjust_omp_clauses_data data;
14684 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
14686 /* OpenMP. Implicit clauses are added at the start of the clause list,
14687 but after any non-map clauses. */
14688 tree *implicit_add_list_p = orig_list_p;
14689 while (*implicit_add_list_p
14690 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
14691 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
14692 data.list_p = implicit_add_list_p;
14694 else
14695 /* OpenACC. */
14696 data.list_p = list_p;
14697 data.pre_p = pre_p;
14698 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
14700 if (has_inscan_reductions)
14701 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
14702 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14703 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
14705 error_at (OMP_CLAUSE_LOCATION (c),
14706 "%<inscan%> %<reduction%> clause used together with "
14707 "%<linear%> clause for a variable other than loop "
14708 "iterator");
14709 break;
14712 gimplify_omp_ctxp = ctx->outer_context;
14713 delete_omp_context (ctx);
14716 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
14717 -1 if unknown yet (simd is involved, won't be known until vectorization)
14718 and 1 if they do. If SCORES is non-NULL, it should point to an array
14719 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
14720 of the CONSTRUCTS (position -1 if it will never match) followed by
14721 the number of constructs in the OpenMP context construct trait.  If
14722 the score depends on whether it will be in a declare simd clone or
14723 not, the function returns 2 and there will be two sets of the scores,
14724 the first one for the case that it is not in a declare simd clone,
14725 the other for the case that it is in a declare simd clone. */
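/* For illustration (an assumed example, not from the original source):
   for code nested as

     #pragma omp target
     #pragma omp parallel
     { ... }

   a context selector such as 'match (construct = {target, parallel})'
   arrives here as CONSTRUCTS == {OMP_TARGET, OMP_PARALLEL} and
   NCONSTRUCTS == 2; the walk over gimplify_omp_ctxp below decides
   whether those selectors match the enclosing constructs.  */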
14727 int
14728 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
14729 int *scores)
14731 int matched = 0, cnt = 0;
14732 bool simd_seen = false;
14733 bool target_seen = false;
14734 int declare_simd_cnt = -1;
14735 auto_vec<enum tree_code, 16> codes;
14736 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
14738 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
14739 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
14740 == ORT_TARGET && ctx->code == OMP_TARGET)
14741 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
14742 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
14743 || (ctx->region_type == ORT_SIMD
14744 && ctx->code == OMP_SIMD
14745 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
14747 ++cnt;
14748 if (scores)
14749 codes.safe_push (ctx->code);
14750 else if (matched < nconstructs && ctx->code == constructs[matched])
14752 if (ctx->code == OMP_SIMD)
14754 if (matched)
14755 return 0;
14756 simd_seen = true;
14758 ++matched;
14760 if (ctx->code == OMP_TARGET)
14762 if (scores == NULL)
14763 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
14764 target_seen = true;
14765 break;
14768 else if (ctx->region_type == ORT_WORKSHARE
14769 && ctx->code == OMP_LOOP
14770 && ctx->outer_context
14771 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
14772 && ctx->outer_context->outer_context
14773 && ctx->outer_context->outer_context->code == OMP_LOOP
14774 && ctx->outer_context->outer_context->distribute)
14775 ctx = ctx->outer_context->outer_context;
14776 ctx = ctx->outer_context;
14778 if (!target_seen
14779 && lookup_attribute ("omp declare simd",
14780 DECL_ATTRIBUTES (current_function_decl)))
14782 /* Declare simd is a "maybe" case: it is supposed to be added only to
14783 the clones created by omp-simd-clone.cc, not to the base function. */
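/* E.g. (an assumed example, not from the original source):

     #pragma omp declare simd
     int f (int x);

   Only the clones that omp-simd-clone.cc creates for 'f' really sit in
   a 'simd' construct context, so when SCORES is non-null both sets of
   scores are produced and the caller learns this via the return
   value 2.  */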
14784 declare_simd_cnt = cnt++;
14785 if (scores)
14786 codes.safe_push (OMP_SIMD);
14787 else if (cnt == 0
14788 && constructs[0] == OMP_SIMD)
14790 gcc_assert (matched == 0);
14791 simd_seen = true;
14792 if (++matched == nconstructs)
14793 return -1;
14796 if (tree attr = lookup_attribute ("omp declare variant variant",
14797 DECL_ATTRIBUTES (current_function_decl)))
14799 tree selectors = TREE_VALUE (attr);
14800 int variant_nconstructs = list_length (selectors);
14801 enum tree_code *variant_constructs = NULL;
14802 if (!target_seen && variant_nconstructs)
14804 variant_constructs
14805 = (enum tree_code *) alloca (variant_nconstructs
14806 * sizeof (enum tree_code));
14807 omp_construct_traits_to_codes (selectors, variant_nconstructs,
14808 variant_constructs);
14810 for (int i = 0; i < variant_nconstructs; i++)
14812 ++cnt;
14813 if (scores)
14814 codes.safe_push (variant_constructs[i]);
14815 else if (matched < nconstructs
14816 && variant_constructs[i] == constructs[matched])
14818 if (variant_constructs[i] == OMP_SIMD)
14820 if (matched)
14821 return 0;
14822 simd_seen = true;
14824 ++matched;
14828 if (!target_seen
14829 && lookup_attribute ("omp declare target block",
14830 DECL_ATTRIBUTES (current_function_decl)))
14832 if (scores)
14833 codes.safe_push (OMP_TARGET);
14834 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
14835 ++matched;
14837 if (scores)
14839 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
14841 int j = codes.length () - 1;
14842 for (int i = nconstructs - 1; i >= 0; i--)
14844 while (j >= 0
14845 && (pass != 0 || declare_simd_cnt != j)
14846 && constructs[i] != codes[j])
14847 --j;
14848 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
14849 *scores++ = j - 1;
14850 else
14851 *scores++ = j;
14853 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
14854 ? codes.length () - 1 : codes.length ());
14856 return declare_simd_cnt == -1 ? 1 : 2;
14858 if (matched == nconstructs)
14859 return simd_seen ? -1 : 1;
14860 return 0;
14863 /* Gimplify OACC_CACHE. */
14865 static void
14866 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
14868 tree expr = *expr_p;
14870 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
14871 OACC_CACHE);
14872 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
14873 OACC_CACHE);
14875 /* TODO: Do something sensible with this information. */
14877 *expr_p = NULL_TREE;
14880 /* Helper function of gimplify_oacc_declare.  Its purpose is, if
14881 required, to translate the 'kind' in CLAUSE into separate 'entry'
14882 and 'exit' kinds.  The entry kind replaces the one in CLAUSE, while
14883 the exit kind is used in a new omp_clause returned to the caller. */
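/* Sketch of the translation performed by the switch below:

     kind in CLAUSE      entry kind (in CLAUSE)   exit kind (returned)
     GOMP_MAP_ALLOC      GOMP_MAP_ALLOC           GOMP_MAP_RELEASE
     GOMP_MAP_FROM       GOMP_MAP_FORCE_ALLOC     GOMP_MAP_FROM
     GOMP_MAP_TOFROM     GOMP_MAP_TO              GOMP_MAP_FROM

   The remaining accepted kinds are left unchanged and no exit clause
   is built.  */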
14885 static tree
14886 gimplify_oacc_declare_1 (tree clause)
14888 HOST_WIDE_INT kind, new_op;
14889 bool ret = false;
14890 tree c = NULL;
14892 kind = OMP_CLAUSE_MAP_KIND (clause);
14894 switch (kind)
14896 case GOMP_MAP_ALLOC:
14897 new_op = GOMP_MAP_RELEASE;
14898 ret = true;
14899 break;
14901 case GOMP_MAP_FROM:
14902 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
14903 new_op = GOMP_MAP_FROM;
14904 ret = true;
14905 break;
14907 case GOMP_MAP_TOFROM:
14908 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
14909 new_op = GOMP_MAP_FROM;
14910 ret = true;
14911 break;
14913 case GOMP_MAP_DEVICE_RESIDENT:
14914 case GOMP_MAP_FORCE_DEVICEPTR:
14915 case GOMP_MAP_FORCE_PRESENT:
14916 case GOMP_MAP_LINK:
14917 case GOMP_MAP_POINTER:
14918 case GOMP_MAP_TO:
14919 break;
14921 default:
14922 gcc_unreachable ();
14923 break;
14926 if (ret)
14928 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
14929 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
14930 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
14933 return c;
14936 /* Gimplify OACC_DECLARE. */
14938 static void
14939 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
14941 tree expr = *expr_p;
14942 gomp_target *stmt;
14943 tree clauses, t, decl;
14945 clauses = OACC_DECLARE_CLAUSES (expr);
14947 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
14948 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
14950 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
14952 decl = OMP_CLAUSE_DECL (t);
14954 if (TREE_CODE (decl) == MEM_REF)
14955 decl = TREE_OPERAND (decl, 0);
14957 if (VAR_P (decl) && !is_oacc_declared (decl))
14959 tree attr = get_identifier ("oacc declare target");
14960 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
14961 DECL_ATTRIBUTES (decl));
14964 if (VAR_P (decl)
14965 && !is_global_var (decl)
14966 && DECL_CONTEXT (decl) == current_function_decl)
14968 tree c = gimplify_oacc_declare_1 (t);
14969 if (c)
14971 if (oacc_declare_returns == NULL)
14972 oacc_declare_returns = new hash_map<tree, tree>;
14974 oacc_declare_returns->put (decl, c);
14978 if (gimplify_omp_ctxp)
14979 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
14982 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
14983 clauses);
14985 gimplify_seq_add_stmt (pre_p, stmt);
14987 *expr_p = NULL_TREE;
14990 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
14991 gimplification of the body, as well as scanning the body for used
14992 variables. We need to do this scan now, because variable-sized
14993 decls will be decomposed during gimplification. */
14995 static void
14996 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
14998 tree expr = *expr_p;
14999 gimple *g;
15000 gimple_seq body = NULL;
15002 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
15003 OMP_PARALLEL_COMBINED (expr)
15004 ? ORT_COMBINED_PARALLEL
15005 : ORT_PARALLEL, OMP_PARALLEL);
15007 push_gimplify_context ();
15009 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
15010 if (gimple_code (g) == GIMPLE_BIND)
15011 pop_gimplify_context (g);
15012 else
15013 pop_gimplify_context (NULL);
15015 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
15016 OMP_PARALLEL);
15018 g = gimple_build_omp_parallel (body,
15019 OMP_PARALLEL_CLAUSES (expr),
15020 NULL_TREE, NULL_TREE);
15021 if (OMP_PARALLEL_COMBINED (expr))
15022 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
15023 gimplify_seq_add_stmt (pre_p, g);
15024 *expr_p = NULL_TREE;
15027 /* Gimplify the contents of an OMP_TASK statement. This involves
15028 gimplification of the body, as well as scanning the body for used
15029 variables. We need to do this scan now, because variable-sized
15030 decls will be decomposed during gimplification. */
15032 static void
15033 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
15035 tree expr = *expr_p;
15036 gimple *g;
15037 gimple_seq body = NULL;
15038 bool nowait = false;
15039 bool has_depend = false;
15041 if (OMP_TASK_BODY (expr) == NULL_TREE)
15043 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15044 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
15046 has_depend = true;
15047 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
15049 error_at (OMP_CLAUSE_LOCATION (c),
15050 "%<mutexinoutset%> kind in %<depend%> clause on a "
15051 "%<taskwait%> construct");
15052 break;
15055 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
15056 nowait = true;
15057 if (nowait && !has_depend)
15059 error_at (EXPR_LOCATION (expr),
15060 "%<taskwait%> construct with %<nowait%> clause but no "
15061 "%<depend%> clauses");
15062 *expr_p = NULL_TREE;
15063 return;
15067 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
15068 omp_find_clause (OMP_TASK_CLAUSES (expr),
15069 OMP_CLAUSE_UNTIED)
15070 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
15072 if (OMP_TASK_BODY (expr))
15074 push_gimplify_context ();
15076 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
15077 if (gimple_code (g) == GIMPLE_BIND)
15078 pop_gimplify_context (g);
15079 else
15080 pop_gimplify_context (NULL);
15083 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
15084 OMP_TASK);
15086 g = gimple_build_omp_task (body,
15087 OMP_TASK_CLAUSES (expr),
15088 NULL_TREE, NULL_TREE,
15089 NULL_TREE, NULL_TREE, NULL_TREE);
15090 if (OMP_TASK_BODY (expr) == NULL_TREE)
15091 gimple_omp_task_set_taskwait_p (g, true);
15092 gimplify_seq_add_stmt (pre_p, g);
15093 *expr_p = NULL_TREE;
15096 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
15097 force it into a temporary initialized in PRE_P and add a firstprivate
15098 clause to ORIG_FOR_STMT. */
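/* For instance (an assumed example, not from the original source), in

     #pragma omp taskloop
     for (int i = 0; i < f (); i++)
       ...

   the call 'f ()' is evaluated into a temporary ahead of the taskloop
   and that temporary is added as firstprivate on ORIG_FOR_STMT.  */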
15100 static void
15101 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
15102 tree orig_for_stmt)
15104 if (*tp == NULL || is_gimple_constant (*tp))
15105 return;
15107 if (TREE_CODE (*tp) == SAVE_EXPR)
15108 gimplify_save_expr (tp, pre_p, NULL);
15109 else
15110 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
15111 /* Reference to pointer conversion is considered useless,
15112 but is significant for firstprivate clause. Force it
15113 here. */
15114 if (type
15115 && TREE_CODE (type) == POINTER_TYPE
15116 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
15118 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
15119 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
15120 gimplify_and_add (m, pre_p);
15121 *tp = v;
15124 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
15125 OMP_CLAUSE_DECL (c) = *tp;
15126 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
15127 OMP_FOR_CLAUSES (orig_for_stmt) = c;
15130 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
15131 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
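/* A stand-alone ordered construct is one with a null body, e.g. (an
   assumed example, not from the original source):

     #pragma omp for ordered(1)
     for (i = 1; i < n; i++)
       {
         #pragma omp ordered depend(sink: i - 1)
         ...
         #pragma omp ordered depend(source)
       }

   The walk does not descend into nested simd/parallel/target
   constructs, since an ordered construct found there belongs to that
   nested region instead.  */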
15133 static tree
15134 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
15136 switch (TREE_CODE (*tp))
15138 case OMP_ORDERED:
15139 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
15140 return *tp;
15141 break;
15142 case OMP_SIMD:
15143 case OMP_PARALLEL:
15144 case OMP_TARGET:
15145 *walk_subtrees = 0;
15146 break;
15147 default:
15148 break;
15150 return NULL_TREE;
15153 /* Gimplify a standalone loop-transforming directive whose
15154 transformations have already been applied.  All that is needed is to
15155 gimplify the remaining loops as normal loops. */
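/* E.g. (an assumed example, not from the original source):

     #pragma omp unroll partial(2)
     for (int i = 0; i < n; i++)
       ...

   reaches this point with the unrolling already performed, so only the
   loop that remains has to be gimplified like an ordinary loop.  */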
15157 static enum gimplify_status
15158 gimplify_omp_loop_xform (tree *expr_p, gimple_seq *pre_p)
15160 tree for_stmt = *expr_p;
15162 if (OMP_FOR_PRE_BODY (for_stmt))
15163 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), pre_p);
15165 gimple_seq pre_body = NULL, post_body = NULL;
15166 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15168 if (TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i) == NULL_TREE)
15169 continue;
15170 tree iters = NULL_TREE;
15171 if (i == 0
15172 && TREE_CODE (for_stmt) == OMP_UNROLL
15173 && !omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_PARTIAL))
15175 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_FULL))
15176 iters = omp_loop_number_of_iterations (for_stmt, 0, NULL);
15177 else
15178 iters = build_int_cst (integer_type_node, 8);
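/* The constant 8 just above is presumably the default unroll factor
   used when 'omp unroll' carries neither a 'full' nor a 'partial'
   clause; treat that reading as an assumption, the value is simply
   hard-coded here.  */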
15180 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15181 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15182 tree decl = TREE_OPERAND (t, 0);
15183 gcc_assert (DECL_P (decl));
15184 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
15185 || POINTER_TYPE_P (TREE_TYPE (decl)));
15186 if (DECL_ARTIFICIAL (decl)
15187 && TREE_PRIVATE (t)
15188 && gimplify_omp_ctxp
15189 && gimplify_omp_ctxp->region_type != ORT_NONE)
15191 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15192 do
15194 splay_tree_node n
15195 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15196 if (n != NULL)
15197 break;
15198 else if (ctx->region_type != ORT_WORKSHARE
15199 && ctx->region_type != ORT_TASKGROUP
15200 && ctx->region_type != ORT_SIMD
15201 && ctx->region_type != ORT_ACC
15202 && !(ctx->region_type & ORT_TARGET_DATA))
15204 omp_add_variable (ctx, decl, GOVD_PRIVATE);
15205 break;
15207 ctx = ctx->outer_context;
15209 while (ctx);
15211 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15213 gcc_assert (seen_error ());
15214 continue;
15216 gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
15217 fb_rvalue);
15218 gimplify_and_add (t, &pre_body);
15219 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15220 gcc_assert (TREE_OPERAND (t, 0) == decl);
15221 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15223 gcc_assert (seen_error ());
15224 continue;
15226 gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
15227 fb_rvalue);
15228 tree l1 = create_artificial_label (UNKNOWN_LOCATION);
15229 tree l2 = create_artificial_label (UNKNOWN_LOCATION);
15230 tree l3 = create_artificial_label (UNKNOWN_LOCATION);
15231 gimplify_seq_add_stmt (&pre_body, gimple_build_goto (l2));
15232 gimplify_seq_add_stmt (&pre_body, gimple_build_label (l1));
15233 gimple_seq this_post_body = NULL;
15234 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15235 if (TREE_CODE (t) == MODIFY_EXPR)
15237 t = TREE_OPERAND (t, 1);
15238 if (TREE_CODE (t) == PLUS_EXPR
15239 && TREE_OPERAND (t, 1) == decl)
15241 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
15242 TREE_OPERAND (t, 0) = decl;
15244 gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
15245 fb_rvalue);
15247 gimplify_and_add (TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i),
15248 &this_post_body);
15249 gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l2));
15250 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15251 gcond *cond = NULL;
15252 tree d = decl;
15253 gimplify_expr (&d, &this_post_body, NULL, is_gimple_val, fb_rvalue);
15254 if (iters && tree_fits_uhwi_p (iters))
15256 unsigned HOST_WIDE_INT niters = tree_to_uhwi (iters);
15257 if ((unsigned HOST_WIDE_INT) (int) niters == niters
15258 && (int) niters > 0)
15260 t = build2 (TREE_CODE (t), boolean_type_node, d,
15261 TREE_OPERAND (t, 1));
15262 t = build3 (ANNOTATE_EXPR, TREE_TYPE (t), t,
15263 build_int_cst (integer_type_node,
15264 annot_expr_unroll_kind),
15265 build_int_cst (integer_type_node, niters));
15266 gimplify_expr (&t, &this_post_body, NULL, is_gimple_val,
15267 fb_rvalue);
15268 cond = gimple_build_cond (NE_EXPR, t, boolean_false_node,
15269 l1, l3);
15272 if (cond == NULL)
15273 cond = gimple_build_cond (TREE_CODE (t), d, TREE_OPERAND (t, 1),
15274 l1, l3);
15275 gimplify_seq_add_stmt (&this_post_body, cond);
15276 gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l3));
15277 gimplify_seq_add_seq (&this_post_body, post_body);
15278 post_body = this_post_body;
15280 gimplify_seq_add_seq (pre_p, pre_body);
15281 gimplify_and_add (OMP_FOR_BODY (for_stmt), pre_p);
15282 gimplify_seq_add_seq (pre_p, post_body);
15284 *expr_p = NULL_TREE;
15285 return GS_ALL_DONE;
15288 /* Gimplify the gross structure of an OMP_FOR statement. */
15290 static enum gimplify_status
15291 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
15293 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
15294 enum gimplify_status ret = GS_ALL_DONE;
15295 enum gimplify_status tret;
15296 gomp_for *gfor;
15297 gimple_seq for_body, for_pre_body;
15298 int i;
15299 bitmap has_decl_expr = NULL;
15300 enum omp_region_type ort = ORT_WORKSHARE;
15301 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
15303 orig_for_stmt = for_stmt = *expr_p;
15305 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
15306 != NULL_TREE);
15307 while (OMP_FOR_INIT (for_stmt) == NULL_TREE)
15309 tree *data[4] = { NULL, NULL, NULL, NULL };
15310 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
15311 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
15312 find_combined_omp_for, data, NULL);
15313 if (inner_for_stmt == NULL_TREE)
15315 gcc_assert (seen_error ());
15316 *expr_p = NULL_TREE;
15317 return GS_ERROR;
15319 gcc_assert (inner_for_stmt == *data[3]);
15320 omp_maybe_apply_loop_xforms (data[3],
15321 data[2]
15322 ? OMP_FOR_CLAUSES (*data[2])
15323 : TREE_CODE (for_stmt) == OMP_FOR
15324 ? OMP_FOR_CLAUSES (for_stmt)
15325 : NULL_TREE);
15326 if (inner_for_stmt != *data[3])
15327 continue;
15328 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
15330 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
15331 &OMP_FOR_PRE_BODY (for_stmt));
15332 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
15334 if (OMP_FOR_PRE_BODY (inner_for_stmt))
15336 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
15337 &OMP_FOR_PRE_BODY (for_stmt));
15338 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
15341 if (data[0])
15343 /* We have some statements or variable declarations in between
15344 the composite construct directives. Move them around the
15345 inner_for_stmt. */
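/* For illustration (an assumption about how this arises): lowering a
   combined construct such as

     #pragma omp target teams distribute parallel for

   can leave compiler-generated declarations sitting between the nested
   directives; they are preserved by re-parenting the bodies around
   inner_for_stmt via the BIND_EXPR built below.  */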
15346 data[0] = expr_p;
15347 for (i = 0; i < 3; i++)
15348 if (data[i])
15350 tree t = *data[i];
15351 if (i < 2 && data[i + 1] == &OMP_BODY (t))
15352 data[i + 1] = data[i];
15353 *data[i] = OMP_BODY (t);
15354 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
15355 NULL_TREE, make_node (BLOCK));
15356 OMP_BODY (t) = body;
15357 append_to_statement_list_force (inner_for_stmt,
15358 &BIND_EXPR_BODY (body));
15359 *data[3] = t;
15360 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
15361 gcc_assert (*data[3] == inner_for_stmt);
15363 return GS_OK;
15366 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
15367 if (!loop_p
15368 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
15369 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15370 i)) == TREE_LIST
15371 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15372 i)))
15374 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
15375 /* Class iterators aren't allowed on OMP_SIMD, so the only
15376 case we need to solve is distribute parallel for. They are
15377 allowed on the loop construct, but that is already handled
15378 in gimplify_omp_loop. */
15379 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
15380 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
15381 && data[1]);
15382 tree orig_decl = TREE_PURPOSE (orig);
15383 tree last = TREE_VALUE (orig);
15384 tree *pc;
15385 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
15386 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
15387 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
15388 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
15389 && OMP_CLAUSE_DECL (*pc) == orig_decl)
15390 break;
15391 if (*pc == NULL_TREE)
15393 tree *spc;
15394 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
15395 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
15396 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
15397 && OMP_CLAUSE_DECL (*spc) == orig_decl)
15398 break;
15399 if (*spc)
15401 tree c = *spc;
15402 *spc = OMP_CLAUSE_CHAIN (c);
15403 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
15404 *pc = c;
15407 if (*pc == NULL_TREE)
15409 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
15411 /* A private clause will appear only on inner_for_stmt.
15412 Change it into firstprivate, and add a private clause
15413 on for_stmt. */
15414 tree c = copy_node (*pc);
15415 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
15416 OMP_FOR_CLAUSES (for_stmt) = c;
15417 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
15418 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
15420 else
15422 /* A lastprivate clause will appear on both inner_for_stmt
15423 and for_stmt.  Add a firstprivate clause to
15424 inner_for_stmt. */
15425 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
15426 OMP_CLAUSE_FIRSTPRIVATE);
15427 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
15428 OMP_CLAUSE_CHAIN (c) = *pc;
15429 *pc = c;
15430 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
15432 tree c = build_omp_clause (UNKNOWN_LOCATION,
15433 OMP_CLAUSE_FIRSTPRIVATE);
15434 OMP_CLAUSE_DECL (c) = last;
15435 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15436 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15437 c = build_omp_clause (UNKNOWN_LOCATION,
15438 *pc ? OMP_CLAUSE_SHARED
15439 : OMP_CLAUSE_FIRSTPRIVATE);
15440 OMP_CLAUSE_DECL (c) = orig_decl;
15441 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15442 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15444 /* Similarly, take care of C++ range for temporaries; those should
15445 be firstprivate on OMP_PARALLEL if any. */
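/* An assumed sketch (not from the original source): for

     #pragma omp distribute parallel for
     for (auto x : container)
       ...

   the C++ frontend creates artificial '__for_range' and '__for_end'
   variables; the loop below adds firstprivate clauses for them on the
   enclosing OMP_PARALLEL.  */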
15446 if (data[1])
15447 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
15448 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
15449 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15450 i)) == TREE_LIST
15451 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
15452 i)))
15454 tree orig
15455 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
15456 tree v = TREE_CHAIN (orig);
15457 tree c = build_omp_clause (UNKNOWN_LOCATION,
15458 OMP_CLAUSE_FIRSTPRIVATE);
15459 /* First add firstprivate clause for the __for_end artificial
15460 decl. */
15461 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
15462 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
15463 == REFERENCE_TYPE)
15464 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
15465 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15466 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15467 if (TREE_VEC_ELT (v, 0))
15469 /* And now the same for __for_range artificial decl if it
15470 exists. */
15471 c = build_omp_clause (UNKNOWN_LOCATION,
15472 OMP_CLAUSE_FIRSTPRIVATE);
15473 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
15474 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
15475 == REFERENCE_TYPE)
15476 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
15477 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
15478 OMP_PARALLEL_CLAUSES (*data[1]) = c;
15481 break;
15483 if (OMP_FOR_INIT (for_stmt) != NULL_TREE)
15485 omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
15486 if (*expr_p != for_stmt)
15487 return GS_OK;
15490 switch (TREE_CODE (for_stmt))
15492 case OMP_FOR:
15493 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
15495 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15496 OMP_CLAUSE_SCHEDULE))
15497 error_at (EXPR_LOCATION (for_stmt),
15498 "%qs clause may not appear on non-rectangular %qs",
15499 "schedule", lang_GNU_Fortran () ? "do" : "for");
15500 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
15501 error_at (EXPR_LOCATION (for_stmt),
15502 "%qs clause may not appear on non-rectangular %qs",
15503 "ordered", lang_GNU_Fortran () ? "do" : "for");
15505 break;
15506 case OMP_DISTRIBUTE:
15507 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
15508 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15509 OMP_CLAUSE_DIST_SCHEDULE))
15510 error_at (EXPR_LOCATION (for_stmt),
15511 "%qs clause may not appear on non-rectangular %qs",
15512 "dist_schedule", "distribute");
15513 break;
15514 case OACC_LOOP:
15515 ort = ORT_ACC;
15516 break;
15517 case OMP_TASKLOOP:
15518 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
15520 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15521 OMP_CLAUSE_GRAINSIZE))
15522 error_at (EXPR_LOCATION (for_stmt),
15523 "%qs clause may not appear on non-rectangular %qs",
15524 "grainsize", "taskloop");
15525 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15526 OMP_CLAUSE_NUM_TASKS))
15527 error_at (EXPR_LOCATION (for_stmt),
15528 "%qs clause may not appear on non-rectangular %qs",
15529 "num_tasks", "taskloop");
15531 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
15532 ort = ORT_UNTIED_TASKLOOP;
15533 else
15534 ort = ORT_TASKLOOP;
15535 break;
15536 case OMP_SIMD:
15537 ort = ORT_SIMD;
15538 break;
15539 case OMP_TILE:
15540 case OMP_UNROLL:
15541 gcc_assert (inner_for_stmt == NULL_TREE);
15542 return gimplify_omp_loop_xform (expr_p, pre_p);
15543 default:
15544 gcc_unreachable ();
15547 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
15548 clause for the IV. */
15549 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
15551 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
15552 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15553 decl = TREE_OPERAND (t, 0);
15554 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15555 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
15556 && OMP_CLAUSE_DECL (c) == decl)
15558 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
15559 break;
15563 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
15564 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
15565 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
15566 ? OMP_LOOP : TREE_CODE (for_stmt));
15568 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
15569 gimplify_omp_ctxp->distribute = true;
15571 /* Handle OMP_FOR_INIT. */
15572 for_pre_body = NULL;
15573 if ((ort == ORT_SIMD
15574 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
15575 && OMP_FOR_PRE_BODY (for_stmt))
15577 has_decl_expr = BITMAP_ALLOC (NULL);
15578 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
15579 && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
15581 t = OMP_FOR_PRE_BODY (for_stmt);
15582 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
15584 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
15586 tree_stmt_iterator si;
15587 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
15588 tsi_next (&si))
15590 t = tsi_stmt (si);
15591 if (TREE_CODE (t) == DECL_EXPR
15592 && VAR_P (DECL_EXPR_DECL (t)))
15593 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
15597 if (OMP_FOR_PRE_BODY (for_stmt))
15599 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
15600 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
15601 else
15603 struct gimplify_omp_ctx ctx;
15604 memset (&ctx, 0, sizeof (ctx));
15605 ctx.region_type = ORT_NONE;
15606 gimplify_omp_ctxp = &ctx;
15607 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
15608 gimplify_omp_ctxp = NULL;
15611 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
15613 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
15614 for_stmt = inner_for_stmt;
15616 /* For taskloop, we need to gimplify the start, end and step before
15617 the taskloop, outside of the taskloop omp context. */
15618 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
15620 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15622 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15623 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
15624 ? pre_p : &for_pre_body);
15625 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
15626 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15628 tree v = TREE_OPERAND (t, 1);
15629 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
15630 for_pre_p, orig_for_stmt);
15631 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
15632 for_pre_p, orig_for_stmt);
15634 else
15635 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
15636 orig_for_stmt);
15638 /* Handle OMP_FOR_COND. */
15639 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15640 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15642 tree v = TREE_OPERAND (t, 1);
15643 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
15644 for_pre_p, orig_for_stmt);
15645 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
15646 for_pre_p, orig_for_stmt);
15648 else
15649 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
15650 orig_for_stmt);
15652 /* Handle OMP_FOR_INCR. */
15653 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15654 if (TREE_CODE (t) == MODIFY_EXPR)
15656 decl = TREE_OPERAND (t, 0);
15657 t = TREE_OPERAND (t, 1);
15658 tree *tp = &TREE_OPERAND (t, 1);
15659 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
15660 tp = &TREE_OPERAND (t, 0);
15662 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
15663 orig_for_stmt);
15667 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
15668 OMP_TASKLOOP);
15671 if (orig_for_stmt != for_stmt)
15672 gimplify_omp_ctxp->combined_loop = true;
15674 for_body = NULL;
15675 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15676 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
15677 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15678 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
15680 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
15681 bool is_doacross = false;
15682 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
15683 find_standalone_omp_ordered, NULL))
15685 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
15686 is_doacross = true;
15687 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
15688 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
15689 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
15690 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
15692 error_at (OMP_CLAUSE_LOCATION (*pc),
15693 "%<linear%> clause may not be specified together "
15694 "with %<ordered%> clause if stand-alone %<ordered%> "
15695 "construct is nested in it");
15696 *pc = OMP_CLAUSE_CHAIN (*pc);
15698 else
15699 pc = &OMP_CLAUSE_CHAIN (*pc);
15701 int collapse = 1, tile = 0;
15702 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
15703 if (c)
15704 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
15705 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
15706 if (c)
15707 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
15708 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
15709 hash_set<tree> *allocate_uids = NULL;
15710 if (c)
15712 allocate_uids = new hash_set<tree>;
15713 for (; c; c = OMP_CLAUSE_CHAIN (c))
15714 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
15715 allocate_uids->add (OMP_CLAUSE_DECL (c));
15717 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
15719 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
15720 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
15721 decl = TREE_OPERAND (t, 0);
15722 gcc_assert (DECL_P (decl));
15723 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
15724 || POINTER_TYPE_P (TREE_TYPE (decl)));
15725 if (is_doacross)
15727 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
15729 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15730 if (TREE_CODE (orig_decl) == TREE_LIST)
15732 orig_decl = TREE_PURPOSE (orig_decl);
15733 if (!orig_decl)
15734 orig_decl = decl;
15736 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
15738 else
15739 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
15740 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
15743 if (for_stmt == orig_for_stmt)
15745 tree orig_decl = decl;
15746 if (OMP_FOR_ORIG_DECLS (for_stmt))
15748 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15749 if (TREE_CODE (orig_decl) == TREE_LIST)
15751 orig_decl = TREE_PURPOSE (orig_decl);
15752 if (!orig_decl)
15753 orig_decl = decl;
15756 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
15757 error_at (EXPR_LOCATION (for_stmt),
15758 "threadprivate iteration variable %qD", orig_decl);
15761 /* Make sure the iteration variable is private. */
15762 tree c = NULL_TREE;
15763 tree c2 = NULL_TREE;
15764 if (orig_for_stmt != for_stmt)
15766 /* Preserve this information until we gimplify the inner simd. */
15767 if (has_decl_expr
15768 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
15769 TREE_PRIVATE (t) = 1;
15771 else if (ort == ORT_SIMD)
15773 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15774 (splay_tree_key) decl);
15775 omp_is_private (gimplify_omp_ctxp, decl,
15776 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
15777 != 1));
15778 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
15780 omp_notice_variable (gimplify_omp_ctxp, decl, true);
15781 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
15782 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15783 OMP_CLAUSE_LASTPRIVATE);
15784 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
15785 OMP_CLAUSE_LASTPRIVATE))
15786 if (OMP_CLAUSE_DECL (c3) == decl)
15788 warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
15789 "conditional %<lastprivate%> on loop "
15790 "iterator %qD ignored", decl);
15791 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
15792 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
15795 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
15797 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
15798 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
15799 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
15800 if ((has_decl_expr
15801 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
15802 || TREE_PRIVATE (t))
15804 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
15805 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
15807 struct gimplify_omp_ctx *outer
15808 = gimplify_omp_ctxp->outer_context;
15809 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
15811 if (outer->region_type == ORT_WORKSHARE
15812 && outer->combined_loop)
15814 n = splay_tree_lookup (outer->variables,
15815 (splay_tree_key)decl);
15816 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
15818 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
15819 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
15821 else
15823 struct gimplify_omp_ctx *octx = outer->outer_context;
15824 if (octx
15825 && octx->region_type == ORT_COMBINED_PARALLEL
15826 && octx->outer_context
15827 && (octx->outer_context->region_type
15828 == ORT_WORKSHARE)
15829 && octx->outer_context->combined_loop)
15831 octx = octx->outer_context;
15832 n = splay_tree_lookup (octx->variables,
15833 (splay_tree_key)decl);
15834 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
15836 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
15837 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
15844 OMP_CLAUSE_DECL (c) = decl;
15845 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
15846 OMP_FOR_CLAUSES (for_stmt) = c;
15847 omp_add_variable (gimplify_omp_ctxp, decl, flags);
15848 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
15849 omp_lastprivate_for_combined_outer_constructs (outer, decl,
15850 true);
15852 else
15854 bool lastprivate
15855 = (!has_decl_expr
15856 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
15857 if (TREE_PRIVATE (t))
15858 lastprivate = false;
15859 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
15861 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
15862 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
15863 lastprivate = false;
15866 struct gimplify_omp_ctx *outer
15867 = gimplify_omp_ctxp->outer_context;
15868 if (outer && lastprivate)
15869 omp_lastprivate_for_combined_outer_constructs (outer, decl,
15870 true);
15872 c = build_omp_clause (input_location,
15873 lastprivate ? OMP_CLAUSE_LASTPRIVATE
15874 : OMP_CLAUSE_PRIVATE);
15875 OMP_CLAUSE_DECL (c) = decl;
15876 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
15877 OMP_FOR_CLAUSES (for_stmt) = c;
15878 omp_add_variable (gimplify_omp_ctxp, decl,
15879 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
15880 | GOVD_EXPLICIT | GOVD_SEEN);
15881 c = NULL_TREE;
15884 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
15886 omp_notice_variable (gimplify_omp_ctxp, decl, true);
15887 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15888 (splay_tree_key) decl);
15889 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
15890 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
15891 OMP_CLAUSE_LASTPRIVATE);
15892 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
15893 OMP_CLAUSE_LASTPRIVATE))
15894 if (OMP_CLAUSE_DECL (c3) == decl)
15896 warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
15897 "conditional %<lastprivate%> on loop "
15898 "iterator %qD ignored", decl);
15899 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
15900 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
15903 else
15904 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
15906 /* If DECL is not a gimple register, create a temporary variable to act
15907 as an iteration counter. This is valid, since DECL cannot be
15908 modified in the body of the loop. Similarly for any iteration vars
15909 in simd with collapse > 1 where the iterator vars must be
15910 lastprivate. And similarly for vars mentioned in allocate clauses. */
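/* E.g. (an assumed example, not from the original source), with an
   addressable iterator:

     int i;
     #pragma omp for lastprivate(i)
     for (i = 0; i < n; i++)
       use (&i);

   a fresh temporary becomes the actual induction variable and 'i' is
   assigned from it at the start of the body (see the
   gimple_build_assign call below).  */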
15911 if (orig_for_stmt != for_stmt)
15912 var = decl;
15913 else if (!is_gimple_reg (decl)
15914 || (ort == ORT_SIMD
15915 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
15916 || (allocate_uids && allocate_uids->contains (decl)))
15918 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15919 /* Make sure omp_add_variable is not called on it prematurely.
15920 We call it ourselves a few lines later. */
15921 gimplify_omp_ctxp = NULL;
15922 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
15923 gimplify_omp_ctxp = ctx;
15924 TREE_OPERAND (t, 0) = var;
15926 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
15928 if (ort == ORT_SIMD
15929 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
15931 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
15932 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
15933 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
15934 OMP_CLAUSE_DECL (c2) = var;
15935 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
15936 OMP_FOR_CLAUSES (for_stmt) = c2;
15937 omp_add_variable (gimplify_omp_ctxp, var,
15938 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
15939 if (c == NULL_TREE)
15941 c = c2;
15942 c2 = NULL_TREE;
15945 else
15946 omp_add_variable (gimplify_omp_ctxp, var,
15947 GOVD_PRIVATE | GOVD_SEEN);
15949 else
15950 var = decl;
15952 gimplify_omp_ctxp->in_for_exprs = true;
15953 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15955 tree lb = TREE_OPERAND (t, 1);
15956 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
15957 is_gimple_val, fb_rvalue, false);
15958 ret = MIN (ret, tret);
15959 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
15960 is_gimple_val, fb_rvalue, false);
15962 else
15963 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
15964 is_gimple_val, fb_rvalue, false);
15965 gimplify_omp_ctxp->in_for_exprs = false;
15966 ret = MIN (ret, tret);
15967 if (ret == GS_ERROR)
15968 return ret;
15970 /* Handle OMP_FOR_COND. */
15971 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
15972 gcc_assert (COMPARISON_CLASS_P (t));
15973 gcc_assert (TREE_OPERAND (t, 0) == decl);
15975 gimplify_omp_ctxp->in_for_exprs = true;
15976 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
15978 tree ub = TREE_OPERAND (t, 1);
15979 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
15980 is_gimple_val, fb_rvalue, false);
15981 ret = MIN (ret, tret);
15982 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
15983 is_gimple_val, fb_rvalue, false);
15985 else
15986 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
15987 is_gimple_val, fb_rvalue, false);
15988 gimplify_omp_ctxp->in_for_exprs = false;
15989 ret = MIN (ret, tret);
15991 /* Handle OMP_FOR_INCR. */
15992 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
15993 switch (TREE_CODE (t))
15995 case PREINCREMENT_EXPR:
15996 case POSTINCREMENT_EXPR:
15998 tree decl = TREE_OPERAND (t, 0);
15999 /* c_omp_for_incr_canonicalize_ptr() should have been
16000 called to massage things appropriately. */
16001 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
16003 if (orig_for_stmt != for_stmt)
16004 break;
16005 t = build_int_cst (TREE_TYPE (decl), 1);
16006 if (c)
16007 OMP_CLAUSE_LINEAR_STEP (c) = t;
16008 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
16009 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
16010 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
16011 break;
16014 case PREDECREMENT_EXPR:
16015 case POSTDECREMENT_EXPR:
16016 /* c_omp_for_incr_canonicalize_ptr() should have been
16017 called to massage things appropriately. */
16018 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
16019 if (orig_for_stmt != for_stmt)
16020 break;
16021 t = build_int_cst (TREE_TYPE (decl), -1);
16022 if (c)
16023 OMP_CLAUSE_LINEAR_STEP (c) = t;
16024 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
16025 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
16026 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
16027 break;
16029 case MODIFY_EXPR:
16030 gcc_assert (TREE_OPERAND (t, 0) == decl);
16031 TREE_OPERAND (t, 0) = var;
16033 t = TREE_OPERAND (t, 1);
16034 switch (TREE_CODE (t))
16036 case PLUS_EXPR:
16037 if (TREE_OPERAND (t, 1) == decl)
16039 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
16040 TREE_OPERAND (t, 0) = var;
16041 break;
16044 /* Fallthru. */
16045 case MINUS_EXPR:
16046 case POINTER_PLUS_EXPR:
16047 gcc_assert (TREE_OPERAND (t, 0) == decl);
16048 TREE_OPERAND (t, 0) = var;
16049 break;
16050 default:
16051 gcc_unreachable ();
16054 gimplify_omp_ctxp->in_for_exprs = true;
16055 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
16056 is_gimple_val, fb_rvalue, false);
16057 ret = MIN (ret, tret);
16058 if (c)
16060 tree step = TREE_OPERAND (t, 1);
16061 tree stept = TREE_TYPE (decl);
16062 if (POINTER_TYPE_P (stept))
16063 stept = sizetype;
16064 step = fold_convert (stept, step);
16065 if (TREE_CODE (t) == MINUS_EXPR)
16066 step = fold_build1 (NEGATE_EXPR, stept, step);
16067 OMP_CLAUSE_LINEAR_STEP (c) = step;
16068 if (step != TREE_OPERAND (t, 1))
16070 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
16071 &for_pre_body, NULL,
16072 is_gimple_val, fb_rvalue, false);
16073 ret = MIN (ret, tret);
16076 gimplify_omp_ctxp->in_for_exprs = false;
16077 break;
16079 default:
16080 gcc_unreachable ();
16083 if (c2)
16085 gcc_assert (c);
16086 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
16089 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
16091 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
16092 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
16093 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
16094 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
16095 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
16096 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
16097 && OMP_CLAUSE_DECL (c) == decl)
16099 if (is_doacross && (collapse == 1 || i >= collapse))
16100 t = var;
16101 else
16103 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
16104 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
16105 gcc_assert (TREE_OPERAND (t, 0) == var);
16106 t = TREE_OPERAND (t, 1);
16107 gcc_assert (TREE_CODE (t) == PLUS_EXPR
16108 || TREE_CODE (t) == MINUS_EXPR
16109 || TREE_CODE (t) == POINTER_PLUS_EXPR);
16110 gcc_assert (TREE_OPERAND (t, 0) == var);
16111 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
16112 is_doacross ? var : decl,
16113 TREE_OPERAND (t, 1));
16115 gimple_seq *seq;
16116 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
16117 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
16118 else
16119 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
16120 push_gimplify_context ();
16121 gimplify_assign (decl, t, seq);
16122 gimple *bind = NULL;
16123 if (gimplify_ctxp->temps)
16125 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
16126 *seq = NULL;
16127 gimplify_seq_add_stmt (seq, bind);
16129 pop_gimplify_context (bind);
16132 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
16133 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
16135 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
16136 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
16137 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
16138 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
16139 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
16140 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
16141 gcc_assert (COMPARISON_CLASS_P (t));
16142 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
16143 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
16144 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
16148 BITMAP_FREE (has_decl_expr);
16149 delete allocate_uids;
16151 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
16152 || (loop_p && orig_for_stmt == for_stmt))
16154 push_gimplify_context ();
16155 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
16157 OMP_FOR_BODY (orig_for_stmt)
16158 = build3 (BIND_EXPR, void_type_node, NULL,
16159 OMP_FOR_BODY (orig_for_stmt), NULL);
16160 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
16164 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
16165 &for_body);
16167 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
16168 || (loop_p && orig_for_stmt == for_stmt))
16170 if (gimple_code (g) == GIMPLE_BIND)
16171 pop_gimplify_context (g);
16172 else
16173 pop_gimplify_context (NULL);
16176 if (orig_for_stmt != for_stmt)
16177 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
16179 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
16180 decl = TREE_OPERAND (t, 0);
16181 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
16182 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
16183 gimplify_omp_ctxp = ctx->outer_context;
16184 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
16185 gimplify_omp_ctxp = ctx;
16186 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
16187 TREE_OPERAND (t, 0) = var;
16188 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
16189 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
16190 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
16191 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
16192 for (int j = i + 1;
16193 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
16195 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
16196 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
16197 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
16198 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
16200 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
16201 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
16203 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
16204 gcc_assert (COMPARISON_CLASS_P (t));
16205 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
16206 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
16208 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
16209 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
16214 gimplify_adjust_omp_clauses (pre_p, for_body,
16215 &OMP_FOR_CLAUSES (orig_for_stmt),
16216 TREE_CODE (orig_for_stmt));
16218 int kind;
16219 switch (TREE_CODE (orig_for_stmt))
16221 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
16222 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
16223 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
16224 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
16225 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
16226 default:
16227 gcc_unreachable ();
16229 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
16231 gimplify_seq_add_seq (pre_p, for_pre_body);
16232 for_pre_body = NULL;
16234 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
16235 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
16236 for_pre_body);
16237 if (orig_for_stmt != for_stmt)
16238 gimple_omp_for_set_combined_p (gfor, true);
16239 if (gimplify_omp_ctxp
16240 && (gimplify_omp_ctxp->combined_loop
16241 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
16242 && gimplify_omp_ctxp->outer_context
16243 && gimplify_omp_ctxp->outer_context->combined_loop)))
16245 gimple_omp_for_set_combined_into_p (gfor, true);
16246 if (gimplify_omp_ctxp->combined_loop)
16247 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
16248 else
16249 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
16252 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
16254 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
16255 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
16256 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
16257 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
16258 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
16259 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
16260 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
16261 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
16264 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
16265 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
16266 The outer taskloop is responsible for computing the number of
16267 iterations and the counts for collapsed loops, and for holding the
16268 taskloop-specific clauses. The task construct represents the effect
16269 of data sharing on the explicit task it creates, and the inner
16270 taskloop expands the static loop inside of that explicit task. */
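/* An illustrative sketch (simplified; the real IL also carries the
   computed iteration counts and lowered clauses): for

       #pragma omp taskloop
       for (i = 0; i < n; i++)
         body;

   the gimplified nest is shaped like

       GIMPLE_OMP_FOR (kind taskloop)      <- computes iteration counts,
         GIMPLE_OMP_TASK (taskloop_p)         holds taskloop clauses
           GIMPLE_OMP_FOR (kind taskloop)  <- the loop run inside the task
             body  */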
16271 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
16273 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
16274 tree task_clauses = NULL_TREE;
16275 tree c = *gfor_clauses_ptr;
16276 tree *gtask_clauses_ptr = &task_clauses;
16277 tree outer_for_clauses = NULL_TREE;
16278 tree *gforo_clauses_ptr = &outer_for_clauses;
16279 bitmap lastprivate_uids = NULL;
16280 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
16282 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
16283 if (c)
16285 lastprivate_uids = BITMAP_ALLOC (NULL);
16286 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
16287 OMP_CLAUSE_LASTPRIVATE))
16288 bitmap_set_bit (lastprivate_uids,
16289 DECL_UID (OMP_CLAUSE_DECL (c)));
16291 c = *gfor_clauses_ptr;
16293 for (; c; c = OMP_CLAUSE_CHAIN (c))
16294 switch (OMP_CLAUSE_CODE (c))
16296 /* These clauses are allowed on the task; move them there. */
16297 case OMP_CLAUSE_SHARED:
16298 case OMP_CLAUSE_FIRSTPRIVATE:
16299 case OMP_CLAUSE_DEFAULT:
16300 case OMP_CLAUSE_IF:
16301 case OMP_CLAUSE_UNTIED:
16302 case OMP_CLAUSE_FINAL:
16303 case OMP_CLAUSE_MERGEABLE:
16304 case OMP_CLAUSE_PRIORITY:
16305 case OMP_CLAUSE_REDUCTION:
16306 case OMP_CLAUSE_IN_REDUCTION:
16307 *gtask_clauses_ptr = c;
16308 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16309 break;
16310 case OMP_CLAUSE_PRIVATE:
16311 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
16313 /* We want private on outer for and firstprivate
16314 on task. */
16315 *gtask_clauses_ptr
16316 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16317 OMP_CLAUSE_FIRSTPRIVATE);
16318 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
16319 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
16320 openacc);
16321 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16322 *gforo_clauses_ptr = c;
16323 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16325 else
16327 *gtask_clauses_ptr = c;
16328 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16330 break;
16331 /* These clauses go into outer taskloop clauses. */
16332 case OMP_CLAUSE_GRAINSIZE:
16333 case OMP_CLAUSE_NUM_TASKS:
16334 case OMP_CLAUSE_NOGROUP:
16335 *gforo_clauses_ptr = c;
16336 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16337 break;
16338 /* The collapse clause is duplicated on both taskloops. */
16339 case OMP_CLAUSE_COLLAPSE:
16340 *gfor_clauses_ptr = c;
16341 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16342 *gforo_clauses_ptr = copy_node (c);
16343 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
16344 break;
16345 /* For lastprivate, keep the clause on the inner taskloop, and add
16346 a shared clause on the task. If the same decl is also firstprivate,
16347 also add a firstprivate clause on the inner taskloop. */
16348 case OMP_CLAUSE_LASTPRIVATE:
16349 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
16351 /* For taskloop C++ lastprivate IVs, we want:
16352 1) private on outer taskloop
16353 2) firstprivate and shared on task
16354 3) lastprivate on inner taskloop */
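/* So, illustratively, for "#pragma omp taskloop lastprivate(i)" with
   loop iterator i, the clauses end up distributed roughly as

       outer taskloop:  private (i)
       task:            firstprivate (i) shared (i)
       inner taskloop:  lastprivate (i)

   (a sketch of the intent; the exact clause chains are built below). */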
16355 *gtask_clauses_ptr
16356 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16357 OMP_CLAUSE_FIRSTPRIVATE);
16358 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
16359 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
16360 openacc);
16361 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16362 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
16363 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16364 OMP_CLAUSE_PRIVATE);
16365 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
16366 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
16367 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
16368 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
16370 *gfor_clauses_ptr = c;
16371 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16372 *gtask_clauses_ptr
16373 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
16374 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
16375 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
16376 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
16377 gtask_clauses_ptr
16378 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16379 break;
16380 /* The allocate clause is duplicated on the task and inner taskloop
16381 if the decl is lastprivate; otherwise it is just put on the task. */
16382 case OMP_CLAUSE_ALLOCATE:
16383 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
16384 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
16386 /* Additionally, put a firstprivate clause on the task
16387 for the allocator if it is not a constant. */
16388 *gtask_clauses_ptr
16389 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16390 OMP_CLAUSE_FIRSTPRIVATE);
16391 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
16392 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
16393 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16395 if (lastprivate_uids
16396 && bitmap_bit_p (lastprivate_uids,
16397 DECL_UID (OMP_CLAUSE_DECL (c))))
16399 *gfor_clauses_ptr = c;
16400 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16401 *gtask_clauses_ptr = copy_node (c);
16402 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
16404 else
16406 *gtask_clauses_ptr = c;
16407 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
16409 break;
16410 default:
16411 gcc_unreachable ();
16413 *gfor_clauses_ptr = NULL_TREE;
16414 *gtask_clauses_ptr = NULL_TREE;
16415 *gforo_clauses_ptr = NULL_TREE;
16416 BITMAP_FREE (lastprivate_uids);
16417 gimple_set_location (gfor, input_location);
16418 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
16419 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
16420 NULL_TREE, NULL_TREE, NULL_TREE);
16421 gimple_set_location (g, input_location);
16422 gimple_omp_task_set_taskloop_p (g, true);
16423 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
16424 gomp_for *gforo
16425 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
16426 gimple_omp_for_collapse (gfor),
16427 gimple_omp_for_pre_body (gfor));
16428 gimple_omp_for_set_pre_body (gfor, NULL);
16429 gimple_omp_for_set_combined_p (gforo, true);
16430 gimple_omp_for_set_combined_into_p (gfor, true);
16431 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
16433 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
16434 tree v = create_tmp_var (type);
16435 gimple_omp_for_set_index (gforo, i, v);
16436 t = unshare_expr (gimple_omp_for_initial (gfor, i));
16437 gimple_omp_for_set_initial (gforo, i, t);
16438 gimple_omp_for_set_cond (gforo, i,
16439 gimple_omp_for_cond (gfor, i));
16440 t = unshare_expr (gimple_omp_for_final (gfor, i));
16441 gimple_omp_for_set_final (gforo, i, t);
16442 t = unshare_expr (gimple_omp_for_incr (gfor, i));
16443 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
16444 TREE_OPERAND (t, 0) = v;
16445 gimple_omp_for_set_incr (gforo, i, t);
16446 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
16447 OMP_CLAUSE_DECL (t) = v;
16448 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
16449 gimple_omp_for_set_clauses (gforo, t);
16450 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
16452 tree *p1 = NULL, *p2 = NULL;
16453 t = gimple_omp_for_initial (gforo, i);
16454 if (TREE_CODE (t) == TREE_VEC)
16455 p1 = &TREE_VEC_ELT (t, 0);
16456 t = gimple_omp_for_final (gforo, i);
16457 if (TREE_CODE (t) == TREE_VEC)
16459 if (p1)
16460 p2 = &TREE_VEC_ELT (t, 0);
16461 else
16462 p1 = &TREE_VEC_ELT (t, 0);
16464 if (p1)
16466 int j;
16467 for (j = 0; j < i; j++)
16468 if (*p1 == gimple_omp_for_index (gfor, j))
16470 *p1 = gimple_omp_for_index (gforo, j);
16471 if (p2)
16472 *p2 = *p1;
16473 break;
16475 gcc_assert (j < i);
16479 gimplify_seq_add_stmt (pre_p, gforo);
16481 else
16482 gimplify_seq_add_stmt (pre_p, gfor);
16484 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
16486 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
16487 unsigned lastprivate_conditional = 0;
16488 while (ctx
16489 && (ctx->region_type == ORT_TARGET_DATA
16490 || ctx->region_type == ORT_TASKGROUP))
16491 ctx = ctx->outer_context;
16492 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
16493 for (tree c = gimple_omp_for_clauses (gfor);
16494 c; c = OMP_CLAUSE_CHAIN (c))
16495 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
16496 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
16497 ++lastprivate_conditional;
16498 if (lastprivate_conditional)
16500 struct omp_for_data fd;
16501 omp_extract_for_data (gfor, &fd, NULL);
16502 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
16503 lastprivate_conditional);
16504 tree var = create_tmp_var_raw (type);
16505 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
16506 OMP_CLAUSE_DECL (c) = var;
16507 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
16508 gimple_omp_for_set_clauses (gfor, c);
16509 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
16512 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
16514 unsigned lastprivate_conditional = 0;
16515 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
16516 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
16517 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
16518 ++lastprivate_conditional;
16519 if (lastprivate_conditional)
16521 struct omp_for_data fd;
16522 omp_extract_for_data (gfor, &fd, NULL);
16523 tree type = unsigned_type_for (fd.iter_type);
16524 while (lastprivate_conditional--)
16526 tree c = build_omp_clause (UNKNOWN_LOCATION,
16527 OMP_CLAUSE__CONDTEMP_);
16528 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
16529 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
16530 gimple_omp_for_set_clauses (gfor, c);
16535 if (ret != GS_ALL_DONE)
16536 return GS_ERROR;
16537 *expr_p = NULL_TREE;
16538 return GS_ALL_DONE;
16541 /* Helper for gimplify_omp_loop, called through walk_tree. */
16543 static tree
16544 note_no_context_vars (tree *tp, int *, void *data)
16546 if (VAR_P (*tp)
16547 && DECL_CONTEXT (*tp) == NULL_TREE
16548 && !is_global_var (*tp))
16550 vec<tree> *d = (vec<tree> *) data;
16551 d->safe_push (*tp);
16552 DECL_CONTEXT (*tp) = current_function_decl;
16554 return NULL_TREE;
16557 /* Gimplify the gross structure of an OMP_LOOP statement. */
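/* A simplified summary of the lowering done below, depending on the
   effective bind clause (illustrative, not the exact IL):

       bind(thread):    loop  ->  simd
       bind(parallel):  loop  ->  for simd
       bind(teams):     loop  ->  distribute parallel-for simd

   i.e. the OMP_LOOP is first turned into an OMP_SIMD, and the pass loop
   below then wraps it in OMP_FOR (and, for bind(teams), additionally in
   OMP_PARALLEL and OMP_DISTRIBUTE). */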
16559 static enum gimplify_status
16560 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
16562 tree for_stmt = *expr_p;
16563 tree clauses = OMP_FOR_CLAUSES (for_stmt);
16564 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
16565 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
16566 int i;
16568 omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
16569 if (*expr_p != for_stmt)
16570 return GS_OK;
16572 /* If order is not present, the behavior is as if order(concurrent)
16573 appeared. */
16574 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
16575 if (order == NULL_TREE)
16577 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
16578 OMP_CLAUSE_CHAIN (order) = clauses;
16579 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
16582 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
16583 if (bind == NULL_TREE)
16585 if (!flag_openmp) /* flag_openmp_simd */
16587 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
16588 kind = OMP_CLAUSE_BIND_TEAMS;
16589 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
16590 kind = OMP_CLAUSE_BIND_PARALLEL;
16591 else
16593 for (; octx; octx = octx->outer_context)
16595 if ((octx->region_type & ORT_ACC) != 0
16596 || octx->region_type == ORT_NONE
16597 || octx->region_type == ORT_IMPLICIT_TARGET)
16598 continue;
16599 break;
16601 if (octx == NULL && !in_omp_construct)
16602 error_at (EXPR_LOCATION (for_stmt),
16603 "%<bind%> clause not specified on a %<loop%> "
16604 "construct not nested inside another OpenMP construct");
16606 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
16607 OMP_CLAUSE_CHAIN (bind) = clauses;
16608 OMP_CLAUSE_BIND_KIND (bind) = kind;
16609 OMP_FOR_CLAUSES (for_stmt) = bind;
16611 else
16612 switch (OMP_CLAUSE_BIND_KIND (bind))
16614 case OMP_CLAUSE_BIND_THREAD:
16615 break;
16616 case OMP_CLAUSE_BIND_PARALLEL:
16617 if (!flag_openmp) /* flag_openmp_simd */
16619 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16620 break;
16622 for (; octx; octx = octx->outer_context)
16623 if (octx->region_type == ORT_SIMD
16624 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
16626 error_at (EXPR_LOCATION (for_stmt),
16627 "%<bind(parallel)%> on a %<loop%> construct nested "
16628 "inside %<simd%> construct");
16629 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16630 break;
16632 kind = OMP_CLAUSE_BIND_PARALLEL;
16633 break;
16634 case OMP_CLAUSE_BIND_TEAMS:
16635 if (!flag_openmp) /* flag_openmp_simd */
16637 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16638 break;
16640 if ((octx
16641 && octx->region_type != ORT_IMPLICIT_TARGET
16642 && octx->region_type != ORT_NONE
16643 && (octx->region_type & ORT_TEAMS) == 0)
16644 || in_omp_construct)
16646 error_at (EXPR_LOCATION (for_stmt),
16647 "%<bind(teams)%> on a %<loop%> region not strictly "
16648 "nested inside of a %<teams%> region");
16649 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
16650 break;
16652 kind = OMP_CLAUSE_BIND_TEAMS;
16653 break;
16654 default:
16655 gcc_unreachable ();
16658 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
16659 switch (OMP_CLAUSE_CODE (*pc))
16661 case OMP_CLAUSE_REDUCTION:
16662 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
16664 error_at (OMP_CLAUSE_LOCATION (*pc),
16665 "%<inscan%> %<reduction%> clause on "
16666 "%qs construct", "loop");
16667 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
16669 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
16671 error_at (OMP_CLAUSE_LOCATION (*pc),
16672 "invalid %<task%> reduction modifier on construct "
16673 "other than %<parallel%>, %qs or %<sections%>",
16674 lang_GNU_Fortran () ? "do" : "for");
16675 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
16677 pc = &OMP_CLAUSE_CHAIN (*pc);
16678 break;
16679 case OMP_CLAUSE_LASTPRIVATE:
16680 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
16682 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
16683 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
16684 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
16685 break;
16686 if (OMP_FOR_ORIG_DECLS (for_stmt)
16687 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
16688 i)) == TREE_LIST
16689 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
16690 i)))
16692 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
16693 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
16694 break;
16697 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
16699 error_at (OMP_CLAUSE_LOCATION (*pc),
16700 "%<lastprivate%> clause on a %<loop%> construct refers "
16701 "to a variable %qD which is not the loop iterator",
16702 OMP_CLAUSE_DECL (*pc));
16703 *pc = OMP_CLAUSE_CHAIN (*pc);
16704 break;
16706 pc = &OMP_CLAUSE_CHAIN (*pc);
16707 break;
16708 default:
16709 pc = &OMP_CLAUSE_CHAIN (*pc);
16710 break;
16713 TREE_SET_CODE (for_stmt, OMP_SIMD);
16715 int last;
16716 switch (kind)
16718 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
16719 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
16720 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
16722 for (int pass = 1; pass <= last; pass++)
16724 if (pass == 2)
16726 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
16727 make_node (BLOCK));
16728 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
16729 *expr_p = make_node (OMP_PARALLEL);
16730 TREE_TYPE (*expr_p) = void_type_node;
16731 OMP_PARALLEL_BODY (*expr_p) = bind;
16732 OMP_PARALLEL_COMBINED (*expr_p) = 1;
16733 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
16734 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
16735 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
16736 if (OMP_FOR_ORIG_DECLS (for_stmt)
16737 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
16738 == TREE_LIST))
16740 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
16741 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
16743 *pc = build_omp_clause (UNKNOWN_LOCATION,
16744 OMP_CLAUSE_FIRSTPRIVATE);
16745 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
16746 pc = &OMP_CLAUSE_CHAIN (*pc);
16750 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
16751 tree *pc = &OMP_FOR_CLAUSES (t);
16752 TREE_TYPE (t) = void_type_node;
16753 OMP_FOR_BODY (t) = *expr_p;
16754 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
16755 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
16756 switch (OMP_CLAUSE_CODE (c))
16758 case OMP_CLAUSE_BIND:
16759 case OMP_CLAUSE_ORDER:
16760 case OMP_CLAUSE_COLLAPSE:
16761 *pc = copy_node (c);
16762 pc = &OMP_CLAUSE_CHAIN (*pc);
16763 break;
16764 case OMP_CLAUSE_PRIVATE:
16765 case OMP_CLAUSE_FIRSTPRIVATE:
16766 /* Only needed on innermost. */
16767 break;
16768 case OMP_CLAUSE_LASTPRIVATE:
16769 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
16771 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16772 OMP_CLAUSE_FIRSTPRIVATE);
16773 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
16774 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
16775 pc = &OMP_CLAUSE_CHAIN (*pc);
16777 *pc = copy_node (c);
16778 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
16779 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
16780 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
16782 if (pass != last)
16783 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
16784 else
16785 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
16786 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
16788 pc = &OMP_CLAUSE_CHAIN (*pc);
16789 break;
16790 case OMP_CLAUSE_REDUCTION:
16791 *pc = copy_node (c);
16792 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
16793 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
16794 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
16796 auto_vec<tree> no_context_vars;
16797 int walk_subtrees = 0;
16798 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
16799 &walk_subtrees, &no_context_vars);
16800 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
16801 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
16802 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
16803 note_no_context_vars,
16804 &no_context_vars);
16805 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
16806 note_no_context_vars,
16807 &no_context_vars);
16809 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
16810 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
16811 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
16812 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
16813 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
16815 hash_map<tree, tree> decl_map;
16816 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
16817 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
16818 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
16819 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
16820 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
16821 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
16823 copy_body_data id;
16824 memset (&id, 0, sizeof (id));
16825 id.src_fn = current_function_decl;
16826 id.dst_fn = current_function_decl;
16827 id.src_cfun = cfun;
16828 id.decl_map = &decl_map;
16829 id.copy_decl = copy_decl_no_change;
16830 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
16831 id.transform_new_cfg = true;
16832 id.transform_return_to_modify = false;
16833 id.eh_lp_nr = 0;
16834 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
16835 &id, NULL);
16836 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
16837 &id, NULL);
16839 for (tree d : no_context_vars)
16841 DECL_CONTEXT (d) = NULL_TREE;
16842 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
16845 else
16847 OMP_CLAUSE_REDUCTION_INIT (*pc)
16848 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
16849 OMP_CLAUSE_REDUCTION_MERGE (*pc)
16850 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
16852 pc = &OMP_CLAUSE_CHAIN (*pc);
16853 break;
16854 default:
16855 gcc_unreachable ();
16857 *pc = NULL_TREE;
16858 *expr_p = t;
16860 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
16864 /* Helper function of optimize_target_teams: find the OMP_TEAMS inside
16865 OMP_TARGET's body. */
16867 static tree
16868 find_omp_teams (tree *tp, int *walk_subtrees, void *)
16870 *walk_subtrees = 0;
16871 switch (TREE_CODE (*tp))
16873 case OMP_TEAMS:
16874 return *tp;
16875 case BIND_EXPR:
16876 case STATEMENT_LIST:
16877 *walk_subtrees = 1;
16878 break;
16879 default:
16880 break;
16882 return NULL_TREE;
16885 /* Helper function of optimize_target_teams: determine whether the expression
16886 can be computed safely on the host before the target construct. */
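/* Illustrative examples of what the walk below accepts and rejects:
   with "int n;" firstprivate on the target, "n * 2 + 1" can be computed
   on the host before the construct, whereas a call such as "foo (n)" or
   a dereference such as "*p" cannot, and makes the walk return the
   offending subtree. */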
16888 static tree
16889 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
16891 splay_tree_node n;
16893 if (TYPE_P (*tp))
16895 *walk_subtrees = 0;
16896 return NULL_TREE;
16898 switch (TREE_CODE (*tp))
16900 case VAR_DECL:
16901 case PARM_DECL:
16902 case RESULT_DECL:
16903 *walk_subtrees = 0;
16904 if (error_operand_p (*tp)
16905 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
16906 || DECL_HAS_VALUE_EXPR_P (*tp)
16907 || DECL_THREAD_LOCAL_P (*tp)
16908 || TREE_SIDE_EFFECTS (*tp)
16909 || TREE_THIS_VOLATILE (*tp))
16910 return *tp;
16911 if (is_global_var (*tp)
16912 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
16913 || lookup_attribute ("omp declare target link",
16914 DECL_ATTRIBUTES (*tp))))
16915 return *tp;
16916 if (VAR_P (*tp)
16917 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
16918 && !is_global_var (*tp)
16919 && decl_function_context (*tp) == current_function_decl)
16920 return *tp;
16921 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
16922 (splay_tree_key) *tp);
16923 if (n == NULL)
16925 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
16926 return NULL_TREE;
16927 return *tp;
16929 else if (n->value & GOVD_LOCAL)
16930 return *tp;
16931 else if (n->value & GOVD_FIRSTPRIVATE)
16932 return NULL_TREE;
16933 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
16934 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
16935 return NULL_TREE;
16936 return *tp;
16937 case INTEGER_CST:
16938 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
16939 return *tp;
16940 return NULL_TREE;
16941 case TARGET_EXPR:
16942 if (TARGET_EXPR_INITIAL (*tp)
16943 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
16944 return *tp;
16945 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
16946 walk_subtrees, NULL);
16947 /* Allow some reasonable subset of integral arithmetics. */
16948 case PLUS_EXPR:
16949 case MINUS_EXPR:
16950 case MULT_EXPR:
16951 case TRUNC_DIV_EXPR:
16952 case CEIL_DIV_EXPR:
16953 case FLOOR_DIV_EXPR:
16954 case ROUND_DIV_EXPR:
16955 case TRUNC_MOD_EXPR:
16956 case CEIL_MOD_EXPR:
16957 case FLOOR_MOD_EXPR:
16958 case ROUND_MOD_EXPR:
16959 case RDIV_EXPR:
16960 case EXACT_DIV_EXPR:
16961 case MIN_EXPR:
16962 case MAX_EXPR:
16963 case LSHIFT_EXPR:
16964 case RSHIFT_EXPR:
16965 case BIT_IOR_EXPR:
16966 case BIT_XOR_EXPR:
16967 case BIT_AND_EXPR:
16968 case NEGATE_EXPR:
16969 case ABS_EXPR:
16970 case BIT_NOT_EXPR:
16971 case NON_LVALUE_EXPR:
16972 CASE_CONVERT:
16973 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
16974 return *tp;
16975 return NULL_TREE;
16976 /* And disallow anything else, except for comparisons. */
16977 default:
16978 if (COMPARISON_CLASS_P (*tp))
16979 return NULL_TREE;
16980 return *tp;
16984 /* Try to determine if the num_teams and/or thread_limit expressions
16985 can have their values determined already before entering the
16986 target construct.
16987 INTEGER_CSTs trivially can, as can
16988 integral decls that are firstprivate (explicitly or implicitly)
16989 or explicitly map(always, to:) or map(always, tofrom:) on the target
16990 region, and expressions involving simple arithmetic on those;
16991 function calls are not OK, nor is dereferencing something, etc.
16992 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
16993 EXPR based on what we find:
16994 0 stands for a clause not specified at all; use the implementation default;
16995 -1 stands for a value that can't be determined easily before entering
16996 the target construct;
16997 -2 means that no explicit teams construct was specified.
16998 If no teams construct is present at all, use 1 for num_teams
16999 and 0 for thread_limit (only one team is involved, and the thread
17000 limit is implementation defined). */
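/* For example (illustrative):

       #pragma omp target
       #pragma omp teams num_teams(4) thread_limit(n)

   adds NUM_TEAMS (upper 4) and THREAD_LIMIT clauses to the target: the
   INTEGER_CST 4 is used directly, while n is usable only if it is
   firstprivate or suitably mapped; otherwise -1 is recorded to mean
   "not known before entering the target". */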
17002 static void
17003 optimize_target_teams (tree target, gimple_seq *pre_p)
17005 tree body = OMP_BODY (target);
17006 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
17007 tree num_teams_lower = NULL_TREE;
17008 tree num_teams_upper = integer_zero_node;
17009 tree thread_limit = integer_zero_node;
17010 location_t num_teams_loc = EXPR_LOCATION (target);
17011 location_t thread_limit_loc = EXPR_LOCATION (target);
17012 tree c, *p, expr;
17013 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
17015 if (teams == NULL_TREE)
17016 num_teams_upper = build_int_cst (integer_type_node, -2);
17017 else
17018 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
17020 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
17022 p = &num_teams_upper;
17023 num_teams_loc = OMP_CLAUSE_LOCATION (c);
17024 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
17026 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
17027 if (TREE_CODE (expr) == INTEGER_CST)
17028 num_teams_lower = expr;
17029 else if (walk_tree (&expr, computable_teams_clause,
17030 NULL, NULL))
17031 num_teams_lower = integer_minus_one_node;
17032 else
17034 num_teams_lower = expr;
17035 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
17036 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
17037 is_gimple_val, fb_rvalue, false)
17038 == GS_ERROR)
17040 gimplify_omp_ctxp = target_ctx;
17041 num_teams_lower = integer_minus_one_node;
17043 else
17045 gimplify_omp_ctxp = target_ctx;
17046 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
17047 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
17048 = num_teams_lower;
17053 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
17055 p = &thread_limit;
17056 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
17058 else
17059 continue;
17060 expr = OMP_CLAUSE_OPERAND (c, 0);
17061 if (TREE_CODE (expr) == INTEGER_CST)
17063 *p = expr;
17064 continue;
17066 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
17068 *p = integer_minus_one_node;
17069 continue;
17071 *p = expr;
17072 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
17073 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
17074 == GS_ERROR)
17076 gimplify_omp_ctxp = target_ctx;
17077 *p = integer_minus_one_node;
17078 continue;
17080 gimplify_omp_ctxp = target_ctx;
17081 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
17082 OMP_CLAUSE_OPERAND (c, 0) = *p;
17084 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
17086 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
17087 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
17088 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
17089 OMP_TARGET_CLAUSES (target) = c;
17091 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
17092 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
17093 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
17094 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
17095 OMP_TARGET_CLAUSES (target) = c;
17098 /* Gimplify the gross structure of several OMP constructs. */
17100 static void
17101 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
17103 tree expr = *expr_p;
17104 gimple *stmt;
17105 gimple_seq body = NULL;
17106 enum omp_region_type ort;
17108 switch (TREE_CODE (expr))
17110 case OMP_SECTIONS:
17111 case OMP_SINGLE:
17112 ort = ORT_WORKSHARE;
17113 break;
17114 case OMP_SCOPE:
17115 ort = ORT_TASKGROUP;
17116 break;
17117 case OMP_TARGET:
17118 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
17119 break;
17120 case OACC_KERNELS:
17121 ort = ORT_ACC_KERNELS;
17122 break;
17123 case OACC_PARALLEL:
17124 ort = ORT_ACC_PARALLEL;
17125 break;
17126 case OACC_SERIAL:
17127 ort = ORT_ACC_SERIAL;
17128 break;
17129 case OACC_DATA:
17130 ort = ORT_ACC_DATA;
17131 break;
17132 case OMP_TARGET_DATA:
17133 ort = ORT_TARGET_DATA;
17134 break;
17135 case OMP_TEAMS:
17136 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
17137 if (gimplify_omp_ctxp == NULL
17138 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
17139 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
17140 break;
17141 case OACC_HOST_DATA:
17142 ort = ORT_ACC_HOST_DATA;
17143 break;
17144 default:
17145 gcc_unreachable ();
17148 bool save_in_omp_construct = in_omp_construct;
17149 if ((ort & ORT_ACC) == 0)
17150 in_omp_construct = false;
17151 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
17152 TREE_CODE (expr));
17153 if (TREE_CODE (expr) == OMP_TARGET)
17154 optimize_target_teams (expr, pre_p);
17155 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
17156 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
17158 push_gimplify_context ();
17159 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
17160 if (gimple_code (g) == GIMPLE_BIND)
17161 pop_gimplify_context (g);
17162 else
17163 pop_gimplify_context (NULL);
17164 if ((ort & ORT_TARGET_DATA) != 0)
17166 enum built_in_function end_ix;
17167 switch (TREE_CODE (expr))
17169 case OACC_DATA:
17170 case OACC_HOST_DATA:
17171 end_ix = BUILT_IN_GOACC_DATA_END;
17172 break;
17173 case OMP_TARGET_DATA:
17174 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
17175 break;
17176 default:
17177 gcc_unreachable ();
17179 tree fn = builtin_decl_explicit (end_ix);
17180 g = gimple_build_call (fn, 0);
17181 gimple_seq cleanup = NULL;
17182 gimple_seq_add_stmt (&cleanup, g);
17183 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
17184 body = NULL;
17185 gimple_seq_add_stmt (&body, g);
17188 else
17189 gimplify_and_add (OMP_BODY (expr), &body);
17190 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
17191 TREE_CODE (expr));
17192 in_omp_construct = save_in_omp_construct;
17194 switch (TREE_CODE (expr))
17196 case OACC_DATA:
17197 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
17198 OMP_CLAUSES (expr));
17199 break;
17200 case OACC_HOST_DATA:
17201 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
17203 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
17204 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
17205 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
17208 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
17209 OMP_CLAUSES (expr));
17210 break;
17211 case OACC_KERNELS:
17212 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
17213 OMP_CLAUSES (expr));
17214 break;
17215 case OACC_PARALLEL:
17216 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
17217 OMP_CLAUSES (expr));
17218 break;
17219 case OACC_SERIAL:
17220 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
17221 OMP_CLAUSES (expr));
17222 break;
17223 case OMP_SECTIONS:
17224 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
17225 break;
17226 case OMP_SINGLE:
17227 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
17228 break;
17229 case OMP_SCOPE:
17230 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
17231 break;
17232 case OMP_TARGET:
17233 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
17234 OMP_CLAUSES (expr));
17235 break;
17236 case OMP_TARGET_DATA:
17237 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
17238 to be evaluated before the use_device_{ptr,addr} clauses if they
17239 refer to the same variables. */
17241 tree use_device_clauses;
17242 tree *pc, *uc = &use_device_clauses;
17243 for (pc = &OMP_CLAUSES (expr); *pc; )
17244 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
17245 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
17247 *uc = *pc;
17248 *pc = OMP_CLAUSE_CHAIN (*pc);
17249 uc = &OMP_CLAUSE_CHAIN (*uc);
17251 else
17252 pc = &OMP_CLAUSE_CHAIN (*pc);
17253 *uc = NULL_TREE;
17254 *pc = use_device_clauses;
17255 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
17256 OMP_CLAUSES (expr));
17258 break;
17259 case OMP_TEAMS:
17260 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
17261 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
17262 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
17263 break;
17264 default:
17265 gcc_unreachable ();
17268 gimplify_seq_add_stmt (pre_p, stmt);
17269 *expr_p = NULL_TREE;
17272 /* Gimplify the gross structure of OpenACC enter/exit data and update, and
17273 OpenMP target update, target enter data and target exit data constructs. */
17275 static void
17276 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
17278 tree expr = *expr_p;
17279 int kind;
17280 gomp_target *stmt;
17281 enum omp_region_type ort = ORT_WORKSHARE;
17283 switch (TREE_CODE (expr))
17285 case OACC_ENTER_DATA:
17286 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
17287 ort = ORT_ACC;
17288 break;
17289 case OACC_EXIT_DATA:
17290 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
17291 ort = ORT_ACC;
17292 break;
17293 case OACC_UPDATE:
17294 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
17295 ort = ORT_ACC;
17296 break;
17297 case OMP_TARGET_UPDATE:
17298 kind = GF_OMP_TARGET_KIND_UPDATE;
17299 break;
17300 case OMP_TARGET_ENTER_DATA:
17301 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
17302 break;
17303 case OMP_TARGET_EXIT_DATA:
17304 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
17305 break;
17306 default:
17307 gcc_unreachable ();
17309 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
17310 ort, TREE_CODE (expr));
17311 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
17312 TREE_CODE (expr));
17313 if (TREE_CODE (expr) == OACC_UPDATE
17314 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
17315 OMP_CLAUSE_IF_PRESENT))
17317 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
17318 clause. */
17319 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
17320 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
17321 switch (OMP_CLAUSE_MAP_KIND (c))
17323 case GOMP_MAP_FORCE_TO:
17324 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
17325 break;
17326 case GOMP_MAP_FORCE_FROM:
17327 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
17328 break;
17329 default:
17330 break;
17333 else if (TREE_CODE (expr) == OACC_EXIT_DATA
17334 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
17335 OMP_CLAUSE_FINALIZE))
17337 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
17338 semantics. */
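/* Illustratively, for "#pragma acc exit data copyout(x) finalize" the
   GOMP_MAP_FROM on x becomes GOMP_MAP_FORCE_FROM, and for a
   "delete(x)" clause GOMP_MAP_RELEASE would become GOMP_MAP_DELETE. */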
17339 bool have_clause = false;
17340 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
17341 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
17342 switch (OMP_CLAUSE_MAP_KIND (c))
17344 case GOMP_MAP_FROM:
17345 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
17346 have_clause = true;
17347 break;
17348 case GOMP_MAP_RELEASE:
17349 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
17350 have_clause = true;
17351 break;
17352 case GOMP_MAP_TO_PSET:
17353 /* Fortran arrays with descriptors must map that descriptor when
17354 doing standalone "attach" operations (in OpenACC). In that
17355 case GOMP_MAP_TO_PSET appears by itself with no preceding
17356 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
17357 break;
17358 case GOMP_MAP_POINTER:
17359 /* TODO PR92929: we may see these here, but they'll always follow
17360 one of the clauses above, and will be handled by libgomp as
17361 one group, so no handling required here. */
17362 gcc_assert (have_clause);
17363 break;
17364 case GOMP_MAP_DETACH:
17365 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
17366 have_clause = false;
17367 break;
17368 case GOMP_MAP_STRUCT:
17369 case GOMP_MAP_STRUCT_UNORD:
17370 have_clause = false;
17371 break;
17372 default:
17373 gcc_unreachable ();
17376 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
17378 gimplify_seq_add_stmt (pre_p, stmt);
17379 *expr_p = NULL_TREE;
17382 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
17383 stabilized the lhs of the atomic operation as *ADDR. Return true if
17384 EXPR is this stabilized form. */
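/* E.g. (illustrative) for an atomic update of "x", ADDR is "&x" and the
   occurrences of the lhs inside the update expression appear as "*&x"
   (possibly behind useless type conversions), which is what this
   predicate recognizes. */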
17386 static bool
17387 goa_lhs_expr_p (tree expr, tree addr)
17389 /* Also include casts to other type variants. The C front end is fond
17390 of adding these for e.g. volatile variables. This is like
17391 STRIP_TYPE_NOPS but includes the main variant lookup. */
17392 STRIP_USELESS_TYPE_CONVERSION (expr);
17394 if (INDIRECT_REF_P (expr))
17396 expr = TREE_OPERAND (expr, 0);
17397 while (expr != addr
17398 && (CONVERT_EXPR_P (expr)
17399 || TREE_CODE (expr) == NON_LVALUE_EXPR)
17400 && TREE_CODE (expr) == TREE_CODE (addr)
17401 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
17403 expr = TREE_OPERAND (expr, 0);
17404 addr = TREE_OPERAND (addr, 0);
17406 if (expr == addr)
17407 return true;
17408 return (TREE_CODE (addr) == ADDR_EXPR
17409 && TREE_CODE (expr) == ADDR_EXPR
17410 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
17412 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
17413 return true;
17414 return false;
17417 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
17418 expression does not involve the lhs, evaluate it into a temporary.
17419 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
17420 or -1 if an error was encountered. */
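/* Illustratively, for an atomic update "x = x + foo ()": the call
   foo () does not involve the lhs, so it is evaluated into a temporary
   in PRE_P, the occurrence of x (seen as *&x) is replaced by LHS_VAR,
   and 1 is returned because the lhs appeared as a subexpression. */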
17422 static int
17423 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
17424 tree lhs_var, tree &target_expr, bool rhs, int depth)
17426 tree expr = *expr_p;
17427 int saw_lhs = 0;
17429 if (goa_lhs_expr_p (expr, lhs_addr))
17431 if (pre_p)
17432 *expr_p = lhs_var;
17433 return 1;
17435 if (is_gimple_val (expr))
17436 return 0;
17438 /* The maximum depth at which the lhs appears in an expression is for
17439 the pattern __builtin_clear_padding (...), __builtin_clear_padding (...),
17440 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
17441 if (++depth > 7)
17442 goto finish;
17444 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
17446 case tcc_binary:
17447 case tcc_comparison:
17448 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
17449 lhs_var, target_expr, true, depth);
17450 /* FALLTHRU */
17451 case tcc_unary:
17452 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
17453 lhs_var, target_expr, true, depth);
17454 break;
17455 case tcc_expression:
17456 switch (TREE_CODE (expr))
17458 case TRUTH_ANDIF_EXPR:
17459 case TRUTH_ORIF_EXPR:
17460 case TRUTH_AND_EXPR:
17461 case TRUTH_OR_EXPR:
17462 case TRUTH_XOR_EXPR:
17463 case BIT_INSERT_EXPR:
17464 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
17465 lhs_addr, lhs_var, target_expr, true,
17466 depth);
17467 /* FALLTHRU */
17468 case TRUTH_NOT_EXPR:
17469 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17470 lhs_addr, lhs_var, target_expr, true,
17471 depth);
17472 break;
17473 case MODIFY_EXPR:
17474 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
17475 target_expr, true, depth))
17476 break;
17477 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
17478 lhs_addr, lhs_var, target_expr, true,
17479 depth);
17480 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17481 lhs_addr, lhs_var, target_expr, false,
17482 depth);
17483 break;
17484 /* FALLTHRU */
17485 case ADDR_EXPR:
17486 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
17487 target_expr, true, depth))
17488 break;
17489 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17490 lhs_addr, lhs_var, target_expr, false,
17491 depth);
17492 break;
17493 case COMPOUND_EXPR:
17494 /* Break out any preevaluations from cp_build_modify_expr. */
17495 for (; TREE_CODE (expr) == COMPOUND_EXPR;
17496 expr = TREE_OPERAND (expr, 1))
17498 /* Special-case __builtin_clear_padding call before
17499 __builtin_memcmp. */
17500 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
17502 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
17503 if (fndecl
17504 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
17505 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
17506 && (!pre_p
17507 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
17508 lhs_addr, lhs_var,
17509 target_expr, true, depth)))
17511 if (pre_p)
17512 *expr_p = expr;
17513 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
17514 pre_p, lhs_addr, lhs_var,
17515 target_expr, true, depth);
17516 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
17517 pre_p, lhs_addr, lhs_var,
17518 target_expr, rhs, depth);
17519 return saw_lhs;
17523 if (pre_p)
17524 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
17526 if (!pre_p)
17527 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
17528 target_expr, rhs, depth);
17529 *expr_p = expr;
17530 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
17531 target_expr, rhs, depth);
17532 case COND_EXPR:
17533 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
17534 lhs_var, target_expr, true, depth))
17535 break;
17536 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17537 lhs_addr, lhs_var, target_expr, true,
17538 depth);
17539 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
17540 lhs_addr, lhs_var, target_expr, true,
17541 depth);
17542 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
17543 lhs_addr, lhs_var, target_expr, true,
17544 depth);
17545 break;
17546 case TARGET_EXPR:
17547 if (TARGET_EXPR_INITIAL (expr))
17549 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
17550 lhs_var, target_expr, true,
17551 depth))
17552 break;
17553 if (expr == target_expr)
17554 saw_lhs = 1;
17555 else
17557 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
17558 pre_p, lhs_addr, lhs_var,
17559 target_expr, true, depth);
17560 if (saw_lhs && target_expr == NULL_TREE && pre_p)
17561 target_expr = expr;
17564 break;
17565 default:
17566 break;
17568 break;
17569 case tcc_reference:
17570 if (TREE_CODE (expr) == BIT_FIELD_REF
17571 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
17572 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
17573 lhs_addr, lhs_var, target_expr, true,
17574 depth);
17575 break;
17576 case tcc_vl_exp:
17577 if (TREE_CODE (expr) == CALL_EXPR)
17579 if (tree fndecl = get_callee_fndecl (expr))
17580 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
17581 BUILT_IN_MEMCMP))
17583 int nargs = call_expr_nargs (expr);
17584 for (int i = 0; i < nargs; i++)
17585 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
17586 pre_p, lhs_addr, lhs_var,
17587 target_expr, true, depth);
17590 break;
17591 default:
17592 break;
17595 finish:
17596 if (saw_lhs == 0 && pre_p)
17598 enum gimplify_status gs;
17599 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
17601 gimplify_stmt (&expr, pre_p);
17602 return saw_lhs;
17604 else if (rhs)
17605 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
17606 else
17607 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
17608 if (gs != GS_ALL_DONE)
17609 saw_lhs = -1;
17612 return saw_lhs;
17615 /* Gimplify an OMP_ATOMIC statement. */
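/* Sketch of the result (simplified): for "#pragma omp atomic" applied
   to "x = x + 1" this emits roughly

       tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>
       tmp2 = tmp + 1
       GIMPLE_OMP_ATOMIC_STORE <tmp2>

   and for the capture forms the whole expression additionally yields
   tmp (capture old) or tmp2 (capture new). */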
17617 static enum gimplify_status
17618 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
17620 tree addr = TREE_OPERAND (*expr_p, 0);
17621 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
17622 ? NULL : TREE_OPERAND (*expr_p, 1);
17623 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
17624 tree tmp_load;
17625 gomp_atomic_load *loadstmt;
17626 gomp_atomic_store *storestmt;
17627 tree target_expr = NULL_TREE;
17629 tmp_load = create_tmp_reg (type);
17630 if (rhs
17631 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
17632 true, 0) < 0)
17633 return GS_ERROR;
17635 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
17636 != GS_ALL_DONE)
17637 return GS_ERROR;
17639 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
17640 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
17641 gimplify_seq_add_stmt (pre_p, loadstmt);
17642 if (rhs)
17644 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
17645 representatives. Use BIT_FIELD_REF on the lhs instead. */
17646 tree rhsarg = rhs;
17647 if (TREE_CODE (rhs) == COND_EXPR)
17648 rhsarg = TREE_OPERAND (rhs, 1);
17649 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
17650 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
17652 tree bitpos = TREE_OPERAND (rhsarg, 2);
17653 tree op1 = TREE_OPERAND (rhsarg, 1);
17654 tree bitsize;
17655 tree tmp_store = tmp_load;
17656 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
17657 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
17658 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
17659 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
17660 else
17661 bitsize = TYPE_SIZE (TREE_TYPE (op1));
17662 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
17663 tree t = build2_loc (EXPR_LOCATION (rhsarg),
17664 MODIFY_EXPR, void_type_node,
17665 build3_loc (EXPR_LOCATION (rhsarg),
17666 BIT_FIELD_REF, TREE_TYPE (op1),
17667 tmp_store, bitsize, bitpos), op1);
17668 if (TREE_CODE (rhs) == COND_EXPR)
17669 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
17670 TREE_OPERAND (rhs, 0), t, void_node);
17671 gimplify_and_add (t, pre_p);
17672 rhs = tmp_store;
17674 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
17675 if (TREE_CODE (rhs) == COND_EXPR)
17676 gimplify_ctxp->allow_rhs_cond_expr = true;
17677 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
17678 is_gimple_val, fb_rvalue);
17679 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
17680 if (gs != GS_ALL_DONE)
17681 return GS_ERROR;
17684 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
17685 rhs = tmp_load;
17686 storestmt
17687 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
17688 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
17690 gimple_omp_atomic_set_weak (loadstmt);
17691 gimple_omp_atomic_set_weak (storestmt);
17693 gimplify_seq_add_stmt (pre_p, storestmt);
17694 switch (TREE_CODE (*expr_p))
17696 case OMP_ATOMIC_READ:
17697 case OMP_ATOMIC_CAPTURE_OLD:
17698 *expr_p = tmp_load;
17699 gimple_omp_atomic_set_need_value (loadstmt);
17700 break;
17701 case OMP_ATOMIC_CAPTURE_NEW:
17702 *expr_p = rhs;
17703 gimple_omp_atomic_set_need_value (storestmt);
17704 break;
17705 default:
17706 *expr_p = NULL;
17707 break;
17710 return GS_ALL_DONE;
17713 /* Gimplify a TRANSACTION_EXPR. This involves gimplifying the
17714 body and adding some EH bits. */
17716 static enum gimplify_status
17717 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
17719 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
17720 gimple *body_stmt;
17721 gtransaction *trans_stmt;
17722 gimple_seq body = NULL;
17723 int subcode = 0;
17725 /* Wrap the transaction body in a BIND_EXPR so we have a context
17726 in which to put decls for OMP. */
17727 if (TREE_CODE (tbody) != BIND_EXPR)
17729 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
17730 TREE_SIDE_EFFECTS (bind) = 1;
17731 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
17732 TRANSACTION_EXPR_BODY (expr) = bind;
17735 push_gimplify_context ();
17736 temp = voidify_wrapper_expr (*expr_p, NULL);
17738 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
17739 pop_gimplify_context (body_stmt);
17741 trans_stmt = gimple_build_transaction (body);
17742 if (TRANSACTION_EXPR_OUTER (expr))
17743 subcode = GTMA_IS_OUTER;
17744 else if (TRANSACTION_EXPR_RELAXED (expr))
17745 subcode = GTMA_IS_RELAXED;
17746 gimple_transaction_set_subcode (trans_stmt, subcode);
17748 gimplify_seq_add_stmt (pre_p, trans_stmt);
17750 if (temp)
17752 *expr_p = temp;
17753 return GS_OK;
17756 *expr_p = NULL_TREE;
17757 return GS_ALL_DONE;
17760 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
17761 is the OMP_BODY of the original EXPR (which has already been
17762 gimplified, so it's no longer present in EXPR).
17764 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
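/* For instance (illustrative): inside "#pragma omp for ordered(2)", a
   "#pragma omp ordered doacross(sink: i - 1, j)" must name the two loop
   iterators i and j, in order; the checks below diagnose mismatched or
   miscounted variables and a mix of source and sink modifiers on the
   same construct. */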
17766 static gimple *
17767 gimplify_omp_ordered (tree expr, gimple_seq body)
17769 tree c, decls;
17770 int failures = 0;
17771 unsigned int i;
17772 tree source_c = NULL_TREE;
17773 tree sink_c = NULL_TREE;
17775 if (gimplify_omp_ctxp)
17777 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
17778 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
17779 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
17781 error_at (OMP_CLAUSE_LOCATION (c),
17782 "%<ordered%> construct with %qs clause must be "
17783 "closely nested inside a loop with %<ordered%> clause",
17784 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
17785 failures++;
17787 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
17788 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
17790 bool fail = false;
17791 sink_c = c;
17792 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
17793 continue; /* omp_cur_iteration - 1 */
17794 for (decls = OMP_CLAUSE_DECL (c), i = 0;
17795 decls && TREE_CODE (decls) == TREE_LIST;
17796 decls = TREE_CHAIN (decls), ++i)
17797 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
17798 continue;
17799 else if (TREE_VALUE (decls)
17800 != gimplify_omp_ctxp->loop_iter_var[2 * i])
17802 error_at (OMP_CLAUSE_LOCATION (c),
17803 "variable %qE is not an iteration "
17804 "of outermost loop %d, expected %qE",
17805 TREE_VALUE (decls), i + 1,
17806 gimplify_omp_ctxp->loop_iter_var[2 * i]);
17807 fail = true;
17808 failures++;
17810 else
17811 TREE_VALUE (decls)
17812 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
17813 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
17815 error_at (OMP_CLAUSE_LOCATION (c),
17816 "number of variables in %qs clause with "
17817 "%<sink%> modifier does not match number of "
17818 "iteration variables",
17819 OMP_CLAUSE_DOACROSS_DEPEND (c)
17820 ? "depend" : "doacross");
17821 failures++;
17824 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
17825 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
17827 if (source_c)
17829 error_at (OMP_CLAUSE_LOCATION (c),
17830 "more than one %qs clause with %<source%> "
17831 "modifier on an %<ordered%> construct",
17832 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
17833 ? "depend" : "doacross");
17834 failures++;
17836 else
17837 source_c = c;
17840 if (source_c && sink_c)
17842 error_at (OMP_CLAUSE_LOCATION (source_c),
17843 "%qs clause with %<source%> modifier specified "
17844 "together with %qs clauses with %<sink%> modifier "
17845 "on the same construct",
17846 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
17847 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
17848 failures++;
17851 if (failures)
17852 return gimple_build_nop ();
17853 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
17856 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
17857 expression produces a value to be used as an operand inside a GIMPLE
17858 statement, the value will be stored back in *EXPR_P. This value will
17859 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
17860 an SSA_NAME. The corresponding sequence of GIMPLE statements is
17861 emitted in PRE_P and POST_P.
17863 Additionally, this process may overwrite parts of the input
17864 expression during gimplification. Ideally, it should be
17865 possible to do non-destructive gimplification.
17867 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
17868 the expression needs to evaluate to a value to be used as
17869 an operand in a GIMPLE statement, this value will be stored in
17870 *EXPR_P on exit. This happens when the caller specifies one
17871 of fb_lvalue or fb_rvalue fallback flags.
17873 PRE_P will contain the sequence of GIMPLE statements corresponding
17874 to the evaluation of EXPR and all the side-effects that must
17875 be executed before the main expression. On exit, the last
17876 statement of PRE_P is the core statement being gimplified. For
17877 instance, when gimplifying 'if (++a)' the last statement in
17878 PRE_P will be 'if (t.1)' where t.1 is the result of
17879 pre-incrementing 'a'.
17881 POST_P will contain the sequence of GIMPLE statements corresponding
17882 to the evaluation of all the side-effects that must be executed
17883 after the main expression. If this is NULL, the post
17884 side-effects are stored at the end of PRE_P.
17886 The reason why the output is split in two is to handle post
17887 side-effects explicitly. In some cases, an expression may have
17888 inner and outer post side-effects which need to be emitted in
17889 an order different from the one given by the recursive
17890 traversal. For instance, for the expression (*p--)++ the post
17891 side-effects of '--' must actually occur *after* the post
17892 side-effects of '++'. However, gimplification will first visit
17893 the inner expression, so if a separate POST sequence was not
17894 used, the resulting sequence would be:
17896 1 t.1 = *p
17897 2 p = p - 1
17898 3 t.2 = t.1 + 1
17899 4 *p = t.2
17901 However, the post-decrement operation in line #2 must not be
17902 evaluated until after the store to *p at line #4, so the
17903 correct sequence should be:
17905 1 t.1 = *p
17906 2 t.2 = t.1 + 1
17907 3 *p = t.2
17908 4 p = p - 1
17910 So, by specifying a separate post queue, it is possible
17911 to emit the post side-effects in the correct order.
17912 If POST_P is NULL, an internal queue will be used. Before
17913 returning to the caller, the sequence POST_P is appended to
17914 the main output sequence PRE_P.
17916 GIMPLE_TEST_F points to a function that takes a tree T and
17917 returns nonzero if T is in the GIMPLE form requested by the
17918 caller. The GIMPLE predicates are in gimple.cc.
17920 FALLBACK tells the function what sort of a temporary we want if
17921 gimplification cannot produce an expression that complies with
17922 GIMPLE_TEST_F.
17924 fb_none means that no temporary should be generated
17925 fb_rvalue means that an rvalue is OK to generate
17926 fb_lvalue means that an lvalue is OK to generate
17927 fb_either means that either is OK, but an lvalue is preferable.
17928 fb_mayfail means that gimplification may fail (in which case
17929 GS_ERROR will be returned)
17931 The return value is either GS_ERROR or GS_ALL_DONE, since this
17932 function iterates until EXPR is completely gimplified or an error
17933 occurs. */
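/* Typical usage (illustrative; compare the call sites in this file):

       enum gimplify_status gs
         = gimplify_expr (&expr, pre_p, NULL, is_gimple_val, fb_rvalue);
       if (gs == GS_ERROR)
         ...

   i.e. request an rvalue satisfying is_gimple_val, with the statements
   implementing EXPR's side effects appended to PRE_P. */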
17935 enum gimplify_status
17936 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17937 bool (*gimple_test_f) (tree), fallback_t fallback)
17939 tree tmp;
17940 gimple_seq internal_pre = NULL;
17941 gimple_seq internal_post = NULL;
17942 tree save_expr;
17943 bool is_statement;
17944 location_t saved_location;
17945 enum gimplify_status ret;
17946 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
17947 tree label;
17949 save_expr = *expr_p;
17950 if (save_expr == NULL_TREE)
17951 return GS_ALL_DONE;
17953 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
17954 is_statement = gimple_test_f == is_gimple_stmt;
17955 if (is_statement)
17956 gcc_assert (pre_p);
17958 /* Consistency checks. */
17959 if (gimple_test_f == is_gimple_reg)
17960 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
17961 else if (gimple_test_f == is_gimple_val
17962 || gimple_test_f == is_gimple_call_addr
17963 || gimple_test_f == is_gimple_condexpr_for_cond
17964 || gimple_test_f == is_gimple_mem_rhs
17965 || gimple_test_f == is_gimple_mem_rhs_or_call
17966 || gimple_test_f == is_gimple_reg_rhs
17967 || gimple_test_f == is_gimple_reg_rhs_or_call
17968 || gimple_test_f == is_gimple_asm_val
17969 || gimple_test_f == is_gimple_mem_ref_addr)
17970 gcc_assert (fallback & fb_rvalue);
17971 else if (gimple_test_f == is_gimple_min_lval
17972 || gimple_test_f == is_gimple_lvalue)
17973 gcc_assert (fallback & fb_lvalue);
17974 else if (gimple_test_f == is_gimple_addressable)
17975 gcc_assert (fallback & fb_either);
17976 else if (gimple_test_f == is_gimple_stmt)
17977 gcc_assert (fallback == fb_none);
17978 else
17980 /* We should have recognized the GIMPLE_TEST_F predicate to
17981 know what kind of fallback to use in case a temporary is
17982 needed to hold the value or address of *EXPR_P. */
17983 gcc_unreachable ();
17986 /* We used to check the predicate here and return immediately if it
17987 succeeds. This is wrong; the design is for gimplification to be
17988 idempotent, and for the predicates to only test for valid forms, not
17989 whether they are fully simplified. */
17990 if (pre_p == NULL)
17991 pre_p = &internal_pre;
17993 if (post_p == NULL)
17994 post_p = &internal_post;
17996 /* Remember the last statements added to PRE_P and POST_P. Every
17997 new statement added by the gimplification helpers needs to be
17998 annotated with location information. To centralize the
17999 responsibility, we remember the last statement that had been
18000 added to both queues before gimplifying *EXPR_P. If
18001 gimplification produces new statements in PRE_P and POST_P, those
18002 statements will be annotated with the same location information
18003 as *EXPR_P. */
18004 pre_last_gsi = gsi_last (*pre_p);
18005 post_last_gsi = gsi_last (*post_p);
18007 saved_location = input_location;
18008 if (save_expr != error_mark_node
18009 && EXPR_HAS_LOCATION (*expr_p))
18010 input_location = EXPR_LOCATION (*expr_p);
18012 /* Loop over the specific gimplifiers until the toplevel node
18013 remains the same. */
18016 /* Strip away as many useless type conversions as possible
18017 at the toplevel. */
18018 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
18020 /* Remember the expr. */
18021 save_expr = *expr_p;
18023 /* Die, die, die, my darling. */
18024 if (error_operand_p (save_expr))
18026 ret = GS_ERROR;
18027 break;
18030 /* Do any language-specific gimplification. */
18031 ret = ((enum gimplify_status)
18032 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
18033 if (ret == GS_OK)
18035 if (*expr_p == NULL_TREE)
18036 break;
18037 if (*expr_p != save_expr)
18038 continue;
18040 else if (ret != GS_UNHANDLED)
18041 break;
18043 /* Make sure that all the cases set 'ret' appropriately. */
18044 ret = GS_UNHANDLED;
18045 switch (TREE_CODE (*expr_p))
18047 /* First deal with the special cases. */
18049 case POSTINCREMENT_EXPR:
18050 case POSTDECREMENT_EXPR:
18051 case PREINCREMENT_EXPR:
18052 case PREDECREMENT_EXPR:
18053 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
18054 fallback != fb_none,
18055 TREE_TYPE (*expr_p));
18056 break;
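/* Illustrative sketch (temporary names are conventional, not from this
   file): a use of "p++" yields the old value, with the increment
   emitted to the post queue, so e.g. "x = p++;" ends up as

     x = p;
     p = p + 1;

   where the second statement executes after the use of the old
   value.  */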
18058 case VIEW_CONVERT_EXPR:
18059 if ((fallback & fb_rvalue)
18060 && is_gimple_reg_type (TREE_TYPE (*expr_p))
18061 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
18063 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18064 post_p, is_gimple_val, fb_rvalue);
18065 recalculate_side_effects (*expr_p);
18066 break;
18068 /* Fallthru. */
18070 case ARRAY_REF:
18071 case ARRAY_RANGE_REF:
18072 case REALPART_EXPR:
18073 case IMAGPART_EXPR:
18074 case COMPONENT_REF:
18075 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
18076 fallback ? fallback : fb_rvalue);
18077 break;
18079 case COND_EXPR:
18080 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
18082 /* C99 code may assign to an array in a structure value of a
18083 conditional expression, and this has undefined behavior
18084 only on execution, so create a temporary if an lvalue is
18085 required. */
18086 if (fallback == fb_lvalue)
18088 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
18089 mark_addressable (*expr_p);
18090 ret = GS_OK;
18092 break;
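/* E.g. "x = b ? f () : g ();" is lowered by gimplify_cond_expr into
   explicit control flow, roughly

     if (b) goto L1; else goto L2;
     L1: x = f (); goto L3;
     L2: x = g ();
     L3:

   (label names here are illustrative only).  */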
18094 case CALL_EXPR:
18095 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
18097 /* C99 code may assign to an array in a structure returned
18098 from a function, and this has undefined behavior only on
18099 execution, so create a temporary if an lvalue is
18100 required. */
18101 if (fallback == fb_lvalue)
18103 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
18104 mark_addressable (*expr_p);
18105 ret = GS_OK;
18107 break;
18109 case TREE_LIST:
18110 gcc_unreachable ();
18112 case OMP_ARRAY_SECTION:
18113 gcc_unreachable ();
18115 case COMPOUND_EXPR:
18116 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
18117 break;
18119 case COMPOUND_LITERAL_EXPR:
18120 ret = gimplify_compound_literal_expr (expr_p, pre_p,
18121 gimple_test_f, fallback);
18122 break;
18124 case MODIFY_EXPR:
18125 case INIT_EXPR:
18126 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
18127 fallback != fb_none);
18128 break;
18130 case TRUTH_ANDIF_EXPR:
18131 case TRUTH_ORIF_EXPR:
18133 /* Preserve the original type of the expression and the
18134 source location of the outer expression. */
18135 tree org_type = TREE_TYPE (*expr_p);
18136 *expr_p = gimple_boolify (*expr_p);
18137 *expr_p = build3_loc (input_location, COND_EXPR,
18138 org_type, *expr_p,
18139 fold_convert_loc
18140 (input_location,
18141 org_type, boolean_true_node),
18142 fold_convert_loc
18143 (input_location,
18144 org_type, boolean_false_node));
18145 ret = GS_OK;
18146 break;
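/* E.g. "a && b" of type int is boolified and rebuilt here as the
   COND_EXPR "a && b ? 1 : 0"; the next iteration of the loop then
   gimplifies that COND_EXPR into explicit short-circuit control
   flow.  */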
18149 case TRUTH_NOT_EXPR:
18151 tree type = TREE_TYPE (*expr_p);
18152 /* The parsers are careful to generate TRUTH_NOT_EXPR
18153 only with operands that are always zero or one.
18154 We do not fold here but handle the only interesting case
18155 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
18156 *expr_p = gimple_boolify (*expr_p);
18157 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
18158 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
18159 TREE_TYPE (*expr_p),
18160 TREE_OPERAND (*expr_p, 0));
18161 else
18162 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
18163 TREE_TYPE (*expr_p),
18164 TREE_OPERAND (*expr_p, 0),
18165 build_int_cst (TREE_TYPE (*expr_p), 1));
18166 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
18167 *expr_p = fold_convert_loc (input_location, type, *expr_p);
18168 ret = GS_OK;
18169 break;
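/* E.g. "!b" becomes "~b" when the boolified operand has one-bit
   precision, and "b ^ 1" otherwise, with a conversion back to the
   original type appended if needed.  */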
18172 case ADDR_EXPR:
18173 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
18174 break;
18176 case ANNOTATE_EXPR:
18178 tree cond = TREE_OPERAND (*expr_p, 0);
18179 tree kind = TREE_OPERAND (*expr_p, 1);
18180 tree data = TREE_OPERAND (*expr_p, 2);
18181 tree type = TREE_TYPE (cond);
18182 if (!INTEGRAL_TYPE_P (type))
18184 *expr_p = cond;
18185 ret = GS_OK;
18186 break;
18188 tree tmp = create_tmp_var (type);
18189 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
18190 gcall *call
18191 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
18192 gimple_call_set_lhs (call, tmp);
18193 gimplify_seq_add_stmt (pre_p, call);
18194 *expr_p = tmp;
18195 ret = GS_ALL_DONE;
18196 break;
18199 case VA_ARG_EXPR:
18200 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
18201 break;
18203 CASE_CONVERT:
18204 if (IS_EMPTY_STMT (*expr_p))
18206 ret = GS_ALL_DONE;
18207 break;
18210 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
18211 || fallback == fb_none)
18213 /* Just strip a conversion to void (or in void context) and
18214 try again. */
18215 *expr_p = TREE_OPERAND (*expr_p, 0);
18216 ret = GS_OK;
18217 break;
18220 ret = gimplify_conversion (expr_p);
18221 if (ret == GS_ERROR)
18222 break;
18223 if (*expr_p != save_expr)
18224 break;
18225 /* FALLTHRU */
18227 case FIX_TRUNC_EXPR:
18228 /* unary_expr: ... | '(' cast ')' val | ... */
18229 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
18230 is_gimple_val, fb_rvalue);
18231 recalculate_side_effects (*expr_p);
18232 break;
18234 case INDIRECT_REF:
18236 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
18237 bool notrap = TREE_THIS_NOTRAP (*expr_p);
18238 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
18240 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
18241 if (*expr_p != save_expr)
18243 ret = GS_OK;
18244 break;
18247 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
18248 is_gimple_reg, fb_rvalue);
18249 if (ret == GS_ERROR)
18250 break;
18252 recalculate_side_effects (*expr_p);
18253 *expr_p = fold_build2_loc (input_location, MEM_REF,
18254 TREE_TYPE (*expr_p),
18255 TREE_OPERAND (*expr_p, 0),
18256 build_int_cst (saved_ptr_type, 0));
18257 TREE_THIS_VOLATILE (*expr_p) = volatilep;
18258 TREE_THIS_NOTRAP (*expr_p) = notrap;
18259 ret = GS_OK;
18260 break;
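/* Illustrative sketch (t.1 is a conventional temporary name): for
   "*(p + 4)" the pointer operand is first gimplified into a register,
   say t.1 = p + 4, and the reference is then rewritten as a MEM_REF
   based on t.1 with offset zero, preserving the original volatility
   and no-trap flags.  */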
18263 /* We arrive here through the various re-gimplification paths. */
18264 case MEM_REF:
18265 /* First try re-folding the whole thing. */
18266 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
18267 TREE_OPERAND (*expr_p, 0),
18268 TREE_OPERAND (*expr_p, 1));
18269 if (tmp)
18271 REF_REVERSE_STORAGE_ORDER (tmp)
18272 = REF_REVERSE_STORAGE_ORDER (*expr_p);
18273 *expr_p = tmp;
18274 recalculate_side_effects (*expr_p);
18275 ret = GS_OK;
18276 break;
18278 /* Avoid re-gimplifying the address operand if it is already
18279 in suitable form. Re-gimplifying would mark the address
18280 operand addressable. Always gimplify when not in SSA form
18281 as we still may have to gimplify decls with value-exprs. */
18282 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
18283 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
18285 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
18286 is_gimple_mem_ref_addr, fb_rvalue);
18287 if (ret == GS_ERROR)
18288 break;
18290 recalculate_side_effects (*expr_p);
18291 ret = GS_ALL_DONE;
18292 break;
18294 /* Constants need not be gimplified. */
18295 case INTEGER_CST:
18296 case REAL_CST:
18297 case FIXED_CST:
18298 case STRING_CST:
18299 case COMPLEX_CST:
18300 case VECTOR_CST:
18301 /* Drop the overflow flag on constants, we do not want
18302 that in the GIMPLE IL. */
18303 if (TREE_OVERFLOW_P (*expr_p))
18304 *expr_p = drop_tree_overflow (*expr_p);
18305 ret = GS_ALL_DONE;
18306 break;
18308 case CONST_DECL:
18309 /* If we require an lvalue, such as for ADDR_EXPR, retain the
18310 CONST_DECL node. Otherwise the decl is replaceable by its
18311 value. */
18312 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
18313 if (fallback & fb_lvalue)
18314 ret = GS_ALL_DONE;
18315 else
18317 *expr_p = DECL_INITIAL (*expr_p);
18318 ret = GS_OK;
18320 break;
18322 case DECL_EXPR:
18323 ret = gimplify_decl_expr (expr_p, pre_p);
18324 break;
18326 case BIND_EXPR:
18327 ret = gimplify_bind_expr (expr_p, pre_p);
18328 break;
18330 case LOOP_EXPR:
18331 ret = gimplify_loop_expr (expr_p, pre_p);
18332 break;
18334 case SWITCH_EXPR:
18335 ret = gimplify_switch_expr (expr_p, pre_p);
18336 break;
18338 case EXIT_EXPR:
18339 ret = gimplify_exit_expr (expr_p);
18340 break;
18342 case GOTO_EXPR:
18343 /* If the target is not a LABEL_DECL, then it is a computed jump
18344 and the target needs to be gimplified. */
18345 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
18347 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
18348 NULL, is_gimple_val, fb_rvalue);
18349 if (ret == GS_ERROR)
18350 break;
18352 gimplify_seq_add_stmt (pre_p,
18353 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
18354 ret = GS_ALL_DONE;
18355 break;
18357 case PREDICT_EXPR:
18358 gimplify_seq_add_stmt (pre_p,
18359 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
18360 PREDICT_EXPR_OUTCOME (*expr_p)));
18361 ret = GS_ALL_DONE;
18362 break;
18364 case LABEL_EXPR:
18365 ret = gimplify_label_expr (expr_p, pre_p);
18366 label = LABEL_EXPR_LABEL (*expr_p);
18367 gcc_assert (decl_function_context (label) == current_function_decl);
18369 /* If the label is used in a goto statement, or the address of the
18370 label is taken, we need to unpoison all variables that were seen
18371 so far. Doing so prevents us from reporting false positives. */
18372 if (asan_poisoned_variables
18373 && asan_used_labels != NULL
18374 && asan_used_labels->contains (label)
18375 && !gimplify_omp_ctxp)
18376 asan_poison_variables (asan_poisoned_variables, false, pre_p);
18377 break;
18379 case CASE_LABEL_EXPR:
18380 ret = gimplify_case_label_expr (expr_p, pre_p);
18382 if (gimplify_ctxp->live_switch_vars)
18383 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
18384 pre_p);
18385 break;
18387 case RETURN_EXPR:
18388 ret = gimplify_return_expr (*expr_p, pre_p);
18389 break;
18391 case CONSTRUCTOR:
18392 /* Don't reduce this in place; let gimplify_init_constructor work its
18393 magic. But if we're just elaborating this for side effects, just
18394 gimplify any element that has side-effects. */
18395 if (fallback == fb_none)
18397 unsigned HOST_WIDE_INT ix;
18398 tree val;
18399 tree temp = NULL_TREE;
18400 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
18401 if (TREE_SIDE_EFFECTS (val))
18402 append_to_statement_list (val, &temp);
18404 *expr_p = temp;
18405 ret = temp ? GS_OK : GS_ALL_DONE;
18407 /* C99 code may assign to an array in a constructed
18408 structure or union, and this has undefined behavior only
18409 on execution, so create a temporary if an lvalue is
18410 required. */
18411 else if (fallback == fb_lvalue)
18413 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
18414 mark_addressable (*expr_p);
18415 ret = GS_OK;
18417 else
18418 ret = GS_ALL_DONE;
18419 break;
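/* E.g. with fallback == fb_none, the expression statement
   "(struct S){ f (), 0 };" reduces to just "f ();" above, since only
   the elements with side effects matter in that context.  */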
18421 /* The following are special cases that are not handled by the
18422 original GIMPLE grammar. */
18424 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
18425 eliminated. */
18426 case SAVE_EXPR:
18427 ret = gimplify_save_expr (expr_p, pre_p, post_p);
18428 break;
18430 case BIT_FIELD_REF:
18431 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18432 post_p, is_gimple_lvalue, fb_either);
18433 recalculate_side_effects (*expr_p);
18434 break;
18436 case TARGET_MEM_REF:
18438 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
18440 if (TMR_BASE (*expr_p))
18441 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
18442 post_p, is_gimple_mem_ref_addr, fb_either);
18443 if (TMR_INDEX (*expr_p))
18444 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
18445 post_p, is_gimple_val, fb_rvalue);
18446 if (TMR_INDEX2 (*expr_p))
18447 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
18448 post_p, is_gimple_val, fb_rvalue);
18449 /* TMR_STEP and TMR_OFFSET are always integer constants. */
18450 ret = MIN (r0, r1);
18452 break;
18454 case NON_LVALUE_EXPR:
18455 /* This should have been stripped above. */
18456 gcc_unreachable ();
18458 case ASM_EXPR:
18459 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
18460 break;
18462 case TRY_FINALLY_EXPR:
18463 case TRY_CATCH_EXPR:
18465 gimple_seq eval, cleanup;
18466 gtry *try_;
18468 /* Calls to destructors are generated automatically in the FINALLY/CATCH
18469 block. They should have their location set to UNKNOWN_LOCATION. However,
18470 gimplify_call_expr will reset these call stmts to input_location
18471 if it finds a stmt's location is unknown. To prevent that resetting for
18472 destructors, we set input_location to UNKNOWN_LOCATION here.
18473 Note that this only affects the destructor calls in FINALLY/CATCH
18474 block, and will automatically reset to its original value by the
18475 end of gimplify_expr. */
18476 input_location = UNKNOWN_LOCATION;
18477 eval = cleanup = NULL;
18478 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
18479 bool save_in_handler_expr = gimplify_ctxp->in_handler_expr;
18480 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
18481 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
18483 gimple_seq n = NULL, e = NULL;
18484 gimplify_ctxp->in_handler_expr = true;
18485 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
18486 0), &n);
18487 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
18488 1), &e);
18489 if (!gimple_seq_empty_p (n) || !gimple_seq_empty_p (e))
18491 geh_else *stmt = gimple_build_eh_else (n, e);
18492 gimple_seq_add_stmt (&cleanup, stmt);
18495 else
18497 gimplify_ctxp->in_handler_expr = true;
18498 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
18500 gimplify_ctxp->in_handler_expr = save_in_handler_expr;
18501 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
18502 if (gimple_seq_empty_p (cleanup))
18504 gimple_seq_add_seq (pre_p, eval);
18505 ret = GS_ALL_DONE;
18506 break;
18508 try_ = gimple_build_try (eval, cleanup,
18509 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
18510 ? GIMPLE_TRY_FINALLY
18511 : GIMPLE_TRY_CATCH);
18512 if (EXPR_HAS_LOCATION (save_expr))
18513 gimple_set_location (try_, EXPR_LOCATION (save_expr));
18514 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
18515 gimple_set_location (try_, saved_location);
18516 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
18517 gimple_try_set_catch_is_cleanup (try_,
18518 TRY_CATCH_IS_CLEANUP (*expr_p));
18519 gimplify_seq_add_stmt (pre_p, try_);
18520 ret = GS_ALL_DONE;
18521 break;
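/* Schematically, this emits a single tuple of the form

     try { <eval> } finally { <cleanup> }

   (or the GIMPLE_TRY_CATCH variant); e.g. for a C++ local with a
   destructor, the cleanup sequence holds the destructor call.  */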
18524 case CLEANUP_POINT_EXPR:
18525 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
18526 break;
18528 case TARGET_EXPR:
18529 ret = gimplify_target_expr (expr_p, pre_p, post_p);
18530 break;
18532 case CATCH_EXPR:
18534 gimple *c;
18535 gimple_seq handler = NULL;
18536 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
18537 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
18538 gimplify_seq_add_stmt (pre_p, c);
18539 ret = GS_ALL_DONE;
18540 break;
18543 case EH_FILTER_EXPR:
18545 gimple *ehf;
18546 gimple_seq failure = NULL;
18548 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
18549 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
18550 copy_warning (ehf, *expr_p);
18551 gimplify_seq_add_stmt (pre_p, ehf);
18552 ret = GS_ALL_DONE;
18553 break;
18556 case OBJ_TYPE_REF:
18558 enum gimplify_status r0, r1;
18559 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
18560 post_p, is_gimple_val, fb_rvalue);
18561 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
18562 post_p, is_gimple_val, fb_rvalue);
18563 TREE_SIDE_EFFECTS (*expr_p) = 0;
18564 ret = MIN (r0, r1);
18566 break;
18568 case LABEL_DECL:
18569 /* We get here when taking the address of a label. We mark
18570 the label as "forced", meaning it can never be removed and
18571 it is a potential target for any computed goto. */
18572 FORCED_LABEL (*expr_p) = 1;
18573 ret = GS_ALL_DONE;
18574 break;
18576 case STATEMENT_LIST:
18577 ret = gimplify_statement_list (expr_p, pre_p);
18578 break;
18580 case WITH_SIZE_EXPR:
18582 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18583 post_p == &internal_post ? NULL : post_p,
18584 gimple_test_f, fallback);
18585 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
18586 is_gimple_val, fb_rvalue);
18587 ret = GS_ALL_DONE;
18589 break;
18591 case VAR_DECL:
18592 case PARM_DECL:
18593 ret = gimplify_var_or_parm_decl (expr_p);
18594 break;
18596 case RESULT_DECL:
18597 /* When within an OMP context, notice uses of variables. */
18598 if (gimplify_omp_ctxp)
18599 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
18600 /* Handlers can refer to the function result; if that has been
18601 moved, we need to track it. */
18602 if (gimplify_ctxp->in_handler_expr && gimplify_ctxp->return_temp)
18603 *expr_p = gimplify_ctxp->return_temp;
18604 ret = GS_ALL_DONE;
18605 break;
18607 case DEBUG_EXPR_DECL:
18608 gcc_unreachable ();
18610 case DEBUG_BEGIN_STMT:
18611 gimplify_seq_add_stmt (pre_p,
18612 gimple_build_debug_begin_stmt
18613 (TREE_BLOCK (*expr_p),
18614 EXPR_LOCATION (*expr_p)));
18615 ret = GS_ALL_DONE;
18616 *expr_p = NULL;
18617 break;
18619 case SSA_NAME:
18620 /* Allow callbacks into the gimplifier during optimization. */
18621 ret = GS_ALL_DONE;
18622 break;
18624 case OMP_PARALLEL:
18625 gimplify_omp_parallel (expr_p, pre_p);
18626 ret = GS_ALL_DONE;
18627 break;
18629 case OMP_TASK:
18630 gimplify_omp_task (expr_p, pre_p);
18631 ret = GS_ALL_DONE;
18632 break;
18634 case OMP_SIMD:
18636 /* Temporarily disable into_ssa, as scan_omp_simd
18637 which calls copy_gimple_seq_and_replace_locals can't deal
18638 with SSA_NAMEs defined outside of the body properly. */
18639 bool saved_into_ssa = gimplify_ctxp->into_ssa;
18640 gimplify_ctxp->into_ssa = false;
18641 ret = gimplify_omp_for (expr_p, pre_p);
18642 gimplify_ctxp->into_ssa = saved_into_ssa;
18643 break;
18646 case OMP_FOR:
18647 case OMP_DISTRIBUTE:
18648 case OMP_TASKLOOP:
18649 case OMP_TILE:
18650 case OMP_UNROLL:
18651 case OACC_LOOP:
18652 ret = gimplify_omp_for (expr_p, pre_p);
18653 break;
18655 case OMP_LOOP:
18656 ret = gimplify_omp_loop (expr_p, pre_p);
18657 break;
18659 case OACC_CACHE:
18660 gimplify_oacc_cache (expr_p, pre_p);
18661 ret = GS_ALL_DONE;
18662 break;
18664 case OACC_DECLARE:
18665 gimplify_oacc_declare (expr_p, pre_p);
18666 ret = GS_ALL_DONE;
18667 break;
18669 case OACC_HOST_DATA:
18670 case OACC_DATA:
18671 case OACC_KERNELS:
18672 case OACC_PARALLEL:
18673 case OACC_SERIAL:
18674 case OMP_SCOPE:
18675 case OMP_SECTIONS:
18676 case OMP_SINGLE:
18677 case OMP_TARGET:
18678 case OMP_TARGET_DATA:
18679 case OMP_TEAMS:
18680 gimplify_omp_workshare (expr_p, pre_p);
18681 ret = GS_ALL_DONE;
18682 break;
18684 case OACC_ENTER_DATA:
18685 case OACC_EXIT_DATA:
18686 case OACC_UPDATE:
18687 case OMP_TARGET_UPDATE:
18688 case OMP_TARGET_ENTER_DATA:
18689 case OMP_TARGET_EXIT_DATA:
18690 gimplify_omp_target_update (expr_p, pre_p);
18691 ret = GS_ALL_DONE;
18692 break;
18694 case OMP_SECTION:
18695 case OMP_STRUCTURED_BLOCK:
18696 case OMP_MASTER:
18697 case OMP_MASKED:
18698 case OMP_ORDERED:
18699 case OMP_CRITICAL:
18700 case OMP_SCAN:
18702 gimple_seq body = NULL;
18703 gimple *g;
18704 bool saved_in_omp_construct = in_omp_construct;
18706 in_omp_construct = true;
18707 gimplify_and_add (OMP_BODY (*expr_p), &body);
18708 in_omp_construct = saved_in_omp_construct;
18709 switch (TREE_CODE (*expr_p))
18711 case OMP_SECTION:
18712 g = gimple_build_omp_section (body);
18713 break;
18714 case OMP_STRUCTURED_BLOCK:
18715 g = gimple_build_omp_structured_block (body);
18716 break;
18717 case OMP_MASTER:
18718 g = gimple_build_omp_master (body);
18719 break;
18720 case OMP_ORDERED:
18721 g = gimplify_omp_ordered (*expr_p, body);
18722 if (OMP_BODY (*expr_p) == NULL_TREE
18723 && gimple_code (g) == GIMPLE_OMP_ORDERED)
18724 gimple_omp_ordered_standalone (g);
18725 break;
18726 case OMP_MASKED:
18727 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
18728 pre_p, ORT_WORKSHARE, OMP_MASKED);
18729 gimplify_adjust_omp_clauses (pre_p, body,
18730 &OMP_MASKED_CLAUSES (*expr_p),
18731 OMP_MASKED);
18732 g = gimple_build_omp_masked (body,
18733 OMP_MASKED_CLAUSES (*expr_p));
18734 break;
18735 case OMP_CRITICAL:
18736 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
18737 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
18738 gimplify_adjust_omp_clauses (pre_p, body,
18739 &OMP_CRITICAL_CLAUSES (*expr_p),
18740 OMP_CRITICAL);
18741 g = gimple_build_omp_critical (body,
18742 OMP_CRITICAL_NAME (*expr_p),
18743 OMP_CRITICAL_CLAUSES (*expr_p));
18744 break;
18745 case OMP_SCAN:
18746 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
18747 pre_p, ORT_WORKSHARE, OMP_SCAN);
18748 gimplify_adjust_omp_clauses (pre_p, body,
18749 &OMP_SCAN_CLAUSES (*expr_p),
18750 OMP_SCAN);
18751 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
18752 break;
18753 default:
18754 gcc_unreachable ();
18756 gimplify_seq_add_stmt (pre_p, g);
18757 ret = GS_ALL_DONE;
18758 break;
18761 case OMP_TASKGROUP:
18763 gimple_seq body = NULL;
18765 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
18766 bool saved_in_omp_construct = in_omp_construct;
18767 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
18768 OMP_TASKGROUP);
18769 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
18771 in_omp_construct = true;
18772 gimplify_and_add (OMP_BODY (*expr_p), &body);
18773 in_omp_construct = saved_in_omp_construct;
18774 gimple_seq cleanup = NULL;
18775 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
18776 gimple *g = gimple_build_call (fn, 0);
18777 gimple_seq_add_stmt (&cleanup, g);
18778 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
18779 body = NULL;
18780 gimple_seq_add_stmt (&body, g);
18781 g = gimple_build_omp_taskgroup (body, *pclauses);
18782 gimplify_seq_add_stmt (pre_p, g);
18783 ret = GS_ALL_DONE;
18784 break;
18787 case OMP_ATOMIC:
18788 case OMP_ATOMIC_READ:
18789 case OMP_ATOMIC_CAPTURE_OLD:
18790 case OMP_ATOMIC_CAPTURE_NEW:
18791 ret = gimplify_omp_atomic (expr_p, pre_p);
18792 break;
18794 case TRANSACTION_EXPR:
18795 ret = gimplify_transaction (expr_p, pre_p);
18796 break;
18798 case TRUTH_AND_EXPR:
18799 case TRUTH_OR_EXPR:
18800 case TRUTH_XOR_EXPR:
18802 tree orig_type = TREE_TYPE (*expr_p);
18803 tree new_type, xop0, xop1;
18804 *expr_p = gimple_boolify (*expr_p);
18805 new_type = TREE_TYPE (*expr_p);
18806 if (!useless_type_conversion_p (orig_type, new_type))
18808 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
18809 ret = GS_OK;
18810 break;
18813 /* Boolified binary truth expressions are semantically equivalent
18814 to bitwise binary expressions. Canonicalize them to the
18815 bitwise variant. */
18816 switch (TREE_CODE (*expr_p))
18818 case TRUTH_AND_EXPR:
18819 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
18820 break;
18821 case TRUTH_OR_EXPR:
18822 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
18823 break;
18824 case TRUTH_XOR_EXPR:
18825 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
18826 break;
18827 default:
18828 break;
18830 /* Now make sure that operands have compatible type to
18831 expression's new_type. */
18832 xop0 = TREE_OPERAND (*expr_p, 0);
18833 xop1 = TREE_OPERAND (*expr_p, 1);
18834 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
18835 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
18836 new_type,
18837 xop0);
18838 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
18839 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
18840 new_type,
18841 xop1);
18842 /* Continue classified as tcc_binary. */
18843 goto expr_2;
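/* E.g. TRUTH_AND_EXPR (a, b) is canonicalized to BIT_AND_EXPR (a, b)
   on boolified operands; unlike TRUTH_ANDIF_EXPR above, no
   short-circuit control flow is needed, so both operands are
   gimplified as plain rvalues at expr_2.  */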
18846 case VEC_COND_EXPR:
18847 goto expr_3;
18849 case VEC_PERM_EXPR:
18850 /* Classified as tcc_expression. */
18851 goto expr_3;
18853 case BIT_INSERT_EXPR:
18854 /* Argument 3 is a constant. */
18855 goto expr_2;
18857 case POINTER_PLUS_EXPR:
18859 enum gimplify_status r0, r1;
18860 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18861 post_p, is_gimple_val, fb_rvalue);
18862 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
18863 post_p, is_gimple_val, fb_rvalue);
18864 recalculate_side_effects (*expr_p);
18865 ret = MIN (r0, r1);
18866 break;
18869 default:
18870 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
18872 case tcc_comparison:
18873 /* Handle comparison of objects of non-scalar-mode aggregates
18874 with a call to memcmp. It would be nice to only have to do
18875 this for variable-sized objects, but then we'd have to allow
18876 the same nest of reference nodes we allow for MODIFY_EXPR and
18877 that's too complex.
18879 Compare scalar mode aggregates as scalar mode values. Using
18880 memcmp for them would be very inefficient at best, and is
18881 plain wrong if bitfields are involved. */
18882 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
18883 ret = GS_ERROR;
18884 else
18886 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
18888 /* Vector comparisons need no boolification. */
18889 if (TREE_CODE (type) == VECTOR_TYPE)
18890 goto expr_2;
18891 else if (!AGGREGATE_TYPE_P (type))
18893 tree org_type = TREE_TYPE (*expr_p);
18894 *expr_p = gimple_boolify (*expr_p);
18895 if (!useless_type_conversion_p (org_type,
18896 TREE_TYPE (*expr_p)))
18898 *expr_p = fold_convert_loc (input_location,
18899 org_type, *expr_p);
18900 ret = GS_OK;
18902 else
18903 goto expr_2;
18905 else if (SCALAR_INT_MODE_P (TYPE_MODE (type)))
18906 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
18907 else
18908 ret = gimplify_variable_sized_compare (expr_p);
18910 break;
18912 /* If *EXPR_P does not need to be special-cased, handle it
18913 according to its class. */
18914 case tcc_unary:
18915 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18916 post_p, is_gimple_val, fb_rvalue);
18917 break;
18919 case tcc_binary:
18920 expr_2:
18922 enum gimplify_status r0, r1;
18924 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18925 post_p, is_gimple_val, fb_rvalue);
18926 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
18927 post_p, is_gimple_val, fb_rvalue);
18929 ret = MIN (r0, r1);
18930 break;
18933 expr_3:
18935 enum gimplify_status r0, r1, r2;
18937 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
18938 post_p, is_gimple_val, fb_rvalue);
18939 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
18940 post_p, is_gimple_val, fb_rvalue);
18941 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
18942 post_p, is_gimple_val, fb_rvalue);
18944 ret = MIN (MIN (r0, r1), r2);
18945 break;
18948 case tcc_declaration:
18949 case tcc_constant:
18950 ret = GS_ALL_DONE;
18951 goto dont_recalculate;
18953 default:
18954 gcc_unreachable ();
18957 recalculate_side_effects (*expr_p);
18959 dont_recalculate:
18960 break;
18963 gcc_assert (*expr_p || ret != GS_OK);
18965 while (ret == GS_OK);
18967 /* If we encountered an error_mark somewhere nested inside, either
18968 stub out the statement or propagate the error back out. */
18969 if (ret == GS_ERROR)
18971 if (is_statement)
18972 *expr_p = NULL;
18973 goto out;
18976 /* This was only valid as a return value from the langhook, which
18977 we handled. Make sure it doesn't escape from any other context. */
18978 gcc_assert (ret != GS_UNHANDLED);
18980 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
18982 /* We aren't looking for a value, and we don't have a valid
18983 statement. If it doesn't have side-effects, throw it away.
18984 We can also get here with code such as "*&&L;", where L is
18985 a LABEL_DECL that is marked as FORCED_LABEL. */
18986 if (TREE_CODE (*expr_p) == LABEL_DECL
18987 || !TREE_SIDE_EFFECTS (*expr_p))
18988 *expr_p = NULL;
18989 else if (!TREE_THIS_VOLATILE (*expr_p))
18991 /* This is probably a _REF that contains something nested that
18992 has side effects. Recurse through the operands to find it. */
18993 enum tree_code code = TREE_CODE (*expr_p);
18995 switch (code)
18997 case COMPONENT_REF:
18998 case REALPART_EXPR:
18999 case IMAGPART_EXPR:
19000 case VIEW_CONVERT_EXPR:
19001 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
19002 gimple_test_f, fallback);
19003 break;
19005 case ARRAY_REF:
19006 case ARRAY_RANGE_REF:
19007 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
19008 gimple_test_f, fallback);
19009 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
19010 gimple_test_f, fallback);
19011 break;
19013 default:
19014 /* Anything else with side-effects must be converted to
19015 a valid statement before we get here. */
19016 gcc_unreachable ();
19019 *expr_p = NULL;
19021 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
19022 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
19023 && !is_empty_type (TREE_TYPE (*expr_p)))
19025 /* Historically, the compiler has treated a bare reference
19026 to a non-BLKmode volatile lvalue as forcing a load. */
19027 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
19029 /* Normally, we do not want to create a temporary for a
19030 TREE_ADDRESSABLE type because such a type should not be
19031 copied by bitwise-assignment. However, we make an
19032 exception here, as all we are doing here is ensuring that
19033 we read the bytes that make up the type. We use
19034 create_tmp_var_raw because create_tmp_var will abort when
19035 given a TREE_ADDRESSABLE type. */
19036 tree tmp = create_tmp_var_raw (type, "vol");
19037 gimple_add_tmp_var (tmp);
19038 gimplify_assign (tmp, *expr_p, pre_p);
19039 *expr_p = NULL;
19041 else
19042 /* We can't do anything useful with a volatile reference to
19043 an incomplete type, so just throw it away. Likewise for
19044 a BLKmode type, since any implicit inner load should
19045 already have been turned into an explicit one by the
19046 gimplification process. */
19047 *expr_p = NULL;
19050 /* If we are gimplifying at the statement level, we're done. Tack
19051 everything together and return. */
19052 if (fallback == fb_none || is_statement)
19054 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
19055 it out for GC to reclaim it. */
19056 *expr_p = NULL_TREE;
19058 if (!gimple_seq_empty_p (internal_pre)
19059 || !gimple_seq_empty_p (internal_post))
19061 gimplify_seq_add_seq (&internal_pre, internal_post);
19062 gimplify_seq_add_seq (pre_p, internal_pre);
19065 /* The result of gimplifying *EXPR_P is going to be the last few
19066 statements in *PRE_P and *POST_P. Add location information
19067 to all the statements that were added by the gimplification
19068 helpers. */
19069 if (!gimple_seq_empty_p (*pre_p))
19070 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
19072 if (!gimple_seq_empty_p (*post_p))
19073 annotate_all_with_location_after (*post_p, post_last_gsi,
19074 input_location);
19076 goto out;
19079 #ifdef ENABLE_GIMPLE_CHECKING
19080 if (*expr_p)
19082 enum tree_code code = TREE_CODE (*expr_p);
19083 /* These expressions should already be in gimple IR form. */
19084 gcc_assert (code != MODIFY_EXPR
19085 && code != ASM_EXPR
19086 && code != BIND_EXPR
19087 && code != CATCH_EXPR
19088 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
19089 && code != EH_FILTER_EXPR
19090 && code != GOTO_EXPR
19091 && code != LABEL_EXPR
19092 && code != LOOP_EXPR
19093 && code != SWITCH_EXPR
19094 && code != TRY_FINALLY_EXPR
19095 && code != EH_ELSE_EXPR
19096 && code != OACC_PARALLEL
19097 && code != OACC_KERNELS
19098 && code != OACC_SERIAL
19099 && code != OACC_DATA
19100 && code != OACC_HOST_DATA
19101 && code != OACC_DECLARE
19102 && code != OACC_UPDATE
19103 && code != OACC_ENTER_DATA
19104 && code != OACC_EXIT_DATA
19105 && code != OACC_CACHE
19106 && code != OMP_CRITICAL
19107 && code != OMP_FOR
19108 && code != OACC_LOOP
19109 && code != OMP_MASTER
19110 && code != OMP_MASKED
19111 && code != OMP_TASKGROUP
19112 && code != OMP_ORDERED
19113 && code != OMP_PARALLEL
19114 && code != OMP_SCAN
19115 && code != OMP_SECTIONS
19116 && code != OMP_SECTION
19117 && code != OMP_STRUCTURED_BLOCK
19118 && code != OMP_SINGLE
19119 && code != OMP_SCOPE);
19121 #endif
19123 /* Otherwise we're gimplifying a subexpression, so the resulting
19124 value is interesting. If it's a valid operand that matches
19125 GIMPLE_TEST_F, we're done. Unless we are handling some
19126 post-effects internally; if that's the case, we need to copy into
19127 a temporary before adding the post-effects to POST_P. */
19128 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
19129 goto out;
19131 /* Otherwise, we need to create a new temporary for the gimplified
19132 expression. */
19134 /* We can't return an lvalue if we have an internal postqueue. The
19135 object the lvalue refers to would (probably) be modified by the
19136 postqueue; we need to copy the value out first, which means an
19137 rvalue. */
19138 if ((fallback & fb_lvalue)
19139 && gimple_seq_empty_p (internal_post)
19140 && is_gimple_addressable (*expr_p))
19142 /* An lvalue will do. Take the address of the expression, store it
19143 in a temporary, and replace the expression with a MEM_REF of
19144 that temporary. */
19145 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
19146 unsigned int ref_align = get_object_alignment (*expr_p);
19147 tree ref_type = TREE_TYPE (*expr_p);
19148 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
19149 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
19150 if (TYPE_ALIGN (ref_type) != ref_align)
19151 ref_type = build_aligned_type (ref_type, ref_align);
19152 *expr_p = build2 (MEM_REF, ref_type,
19153 tmp, build_zero_cst (ref_alias_type));
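/* Illustrative sketch (t.1 is a conventional temporary name): for an
   addressable *EXPR_P such as "s.a" this produces roughly

     t.1 = &s.a;

   and replaces *EXPR_P with a MEM_REF based on t.1, so the caller
   still has an lvalue denoting the original object.  */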
19155 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
19157 /* An rvalue will do. Assign the gimplified expression into a
19158 new temporary TMP and replace the original expression with
19159 TMP. First, make sure that the expression has a type so that
19160 it can be assigned into a temporary. */
19161 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
19162 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
19164 else
19166 #ifdef ENABLE_GIMPLE_CHECKING
19167 if (!(fallback & fb_mayfail))
19169 fprintf (stderr, "gimplification failed:\n");
19170 print_generic_expr (stderr, *expr_p);
19171 debug_tree (*expr_p);
19172 internal_error ("gimplification failed");
19174 #endif
19175 gcc_assert (fallback & fb_mayfail);
19177 /* If this is an asm statement, and the user asked for the
19178 impossible, don't die. Fail and let gimplify_asm_expr
19179 issue an error. */
19180 ret = GS_ERROR;
19181 goto out;
19184 /* Make sure the temporary matches our predicate. */
19185 gcc_assert ((*gimple_test_f) (*expr_p));
19187 if (!gimple_seq_empty_p (internal_post))
19189 annotate_all_with_location (internal_post, input_location);
19190 gimplify_seq_add_seq (pre_p, internal_post);
19193 out:
19194 input_location = saved_location;
19195 return ret;
19198 /* Like gimplify_expr, but make sure the gimplified result is not itself
19199 an SSA name (if it would be, a temporary decl is used instead).
19200 Temporaries required by evaluating *EXPR_P may still be SSA names. */
19202 static enum gimplify_status
19203 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
19204 bool (*gimple_test_f) (tree), fallback_t fallback,
19205 bool allow_ssa)
19207 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
19208 gimple_test_f, fallback);
19209 if (! allow_ssa
19210 && TREE_CODE (*expr_p) == SSA_NAME)
19211 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
19212 return ret;
19215 /* Look through TYPE for variable-sized objects and gimplify each such
19216 size that we find. Add to LIST_P any statements generated. */
19218 void
19219 gimplify_type_sizes (tree type, gimple_seq *list_p)
19221 if (type == NULL || type == error_mark_node)
19222 return;
19224 const bool ignored_p
19225 = TYPE_NAME (type)
19226 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
19227 && DECL_IGNORED_P (TYPE_NAME (type));
19228 tree t;
19230 /* We first do the main variant, then copy into any other variants. */
19231 type = TYPE_MAIN_VARIANT (type);
19233 /* Avoid infinite recursion. */
19234 if (TYPE_SIZES_GIMPLIFIED (type))
19235 return;
19237 TYPE_SIZES_GIMPLIFIED (type) = 1;
19239 switch (TREE_CODE (type))
19241 case INTEGER_TYPE:
19242 case ENUMERAL_TYPE:
19243 case BOOLEAN_TYPE:
19244 case REAL_TYPE:
19245 case FIXED_POINT_TYPE:
19246 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
19247 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
19249 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
19251 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
19252 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
19254 break;
19256 case ARRAY_TYPE:
19257 /* These types may not have declarations, so handle them here. */
19258 gimplify_type_sizes (TREE_TYPE (type), list_p);
19259 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
19260 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
19261 with assigned stack slots, and for -O1+ with -g they should be tracked
19262 by VTA. */
19263 if (!ignored_p
19264 && TYPE_DOMAIN (type)
19265 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
19267 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
19268 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
19269 DECL_IGNORED_P (t) = 0;
19270 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
19271 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
19272 DECL_IGNORED_P (t) = 0;
19274 break;
19276 case RECORD_TYPE:
19277 case UNION_TYPE:
19278 case QUAL_UNION_TYPE:
19279 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
19280 if (TREE_CODE (field) == FIELD_DECL)
19282 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
19283 /* Likewise, ensure variable offsets aren't removed. */
19284 if (!ignored_p
19285 && (t = DECL_FIELD_OFFSET (field))
19286 && VAR_P (t)
19287 && DECL_ARTIFICIAL (t))
19288 DECL_IGNORED_P (t) = 0;
19289 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
19290 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
19291 gimplify_type_sizes (TREE_TYPE (field), list_p);
19293 break;
19295 case POINTER_TYPE:
19296 case REFERENCE_TYPE:
19297 /* We used to recurse on the pointed-to type here, which turned out to
19298 be incorrect because its definition might refer to variables not
19299 yet initialized at this point if a forward declaration is involved.
19301 It was actually useful for anonymous pointed-to types to ensure
19302 that the sizes evaluation dominates every possible later use of the
19303 values. Restricting to such types here would be safe since there
19304 is no possible forward declaration around, but would introduce an
19305 undesirable middle-end semantic to anonymity. We then defer to
19306 front-ends the responsibility of ensuring that the sizes are
19307 evaluated both early and late enough, e.g. by attaching artificial
19308 type declarations to the tree. */
19309 break;
19311 default:
19312 break;
19315 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
19316 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
19318 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
19320 TYPE_SIZE (t) = TYPE_SIZE (type);
19321 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
19322 TYPE_SIZES_GIMPLIFIED (t) = 1;
19326 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
19327 a size or position, has had all of its SAVE_EXPRs evaluated.
19328 We add any required statements to *STMT_P. */
19330 void
19331 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
19333 tree expr = *expr_p;
19335 /* We don't do anything if the value isn't there, is constant, or contains
19336 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
19337 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
19338 will want to replace it with a new variable, but that will cause problems
19339 if this type is from outside the function. It's OK to have that here. */
19340 if (expr == NULL_TREE
19341 || is_gimple_constant (expr)
19342 || VAR_P (expr)
19343 || CONTAINS_PLACEHOLDER_P (expr))
19344 return;
19346 *expr_p = unshare_expr (expr);
19348 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
19349 if the def vanishes. */
19350 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
19352 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
19353 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
19354 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
19355 if (is_gimple_constant (*expr_p))
19356 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
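/* Illustrative sketch (D.1 is a conventional temporary name): for a
   VLA declaration such as "char a[n + 1];" the size expression is
   gimplified here into something like

     D.1 = n + 1;

   so that it is evaluated exactly once, and the type henceforth
   refers to D.1.  */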
19359 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
19360 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
19361 is true, also gimplify the parameters. */
19363 gbind *
19364 gimplify_body (tree fndecl, bool do_parms)
19366 location_t saved_location = input_location;
19367 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
19368 gimple *outer_stmt;
19369 gbind *outer_bind;
19371 timevar_push (TV_TREE_GIMPLIFY);
19373 init_tree_ssa (cfun);
19375 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
19376 gimplification. */
19377 default_rtl_profile ();
19379 gcc_assert (gimplify_ctxp == NULL);
19380 push_gimplify_context (true);
19382 if (flag_openacc || flag_openmp)
19384 gcc_assert (gimplify_omp_ctxp == NULL);
19385 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
19386 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
19389 /* Unshare most shared trees in the body and in that of any nested functions.
19390 It would seem we don't have to do this for nested functions because
19391 they are supposed to be output and then the outer function gimplified
19392 first, but the g++ front end doesn't always do it that way. */
19393 unshare_body (fndecl);
19394 unvisit_body (fndecl);
19396 /* Make sure input_location isn't set to something weird. */
19397 input_location = DECL_SOURCE_LOCATION (fndecl);
19399 /* Resolve callee-copies. This has to be done before processing
19400 the body so that DECL_VALUE_EXPR gets processed correctly. */
19401 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
19403 /* Gimplify the function's body. */
19404 seq = NULL;
19405 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
19406 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
19407 if (!outer_stmt)
19409 outer_stmt = gimple_build_nop ();
19410 gimplify_seq_add_stmt (&seq, outer_stmt);
19413 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
19414 not the case, wrap everything in a GIMPLE_BIND to make it so. */
19415 if (gimple_code (outer_stmt) == GIMPLE_BIND
19416 && (gimple_seq_first_nondebug_stmt (seq)
19417 == gimple_seq_last_nondebug_stmt (seq)))
19419 outer_bind = as_a <gbind *> (outer_stmt);
19420 if (gimple_seq_first_stmt (seq) != outer_stmt
19421 || gimple_seq_last_stmt (seq) != outer_stmt)
19423 /* If there are debug stmts before or after outer_stmt, move them
19424 inside of outer_bind body. */
19425 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
19426 gimple_seq second_seq = NULL;
19427 if (gimple_seq_first_stmt (seq) != outer_stmt
19428 && gimple_seq_last_stmt (seq) != outer_stmt)
19430 second_seq = gsi_split_seq_after (gsi);
19431 gsi_remove (&gsi, false);
19433 else if (gimple_seq_first_stmt (seq) != outer_stmt)
19434 gsi_remove (&gsi, false);
19435 else
19437 gsi_remove (&gsi, false);
19438 second_seq = seq;
19439 seq = NULL;
19441 gimple_seq_add_seq_without_update (&seq,
19442 gimple_bind_body (outer_bind));
19443 gimple_seq_add_seq_without_update (&seq, second_seq);
19444 gimple_bind_set_body (outer_bind, seq);
19447 else
19448 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
19450 DECL_SAVED_TREE (fndecl) = NULL_TREE;
19452 /* If we had callee-copies statements, insert them at the beginning
19453 of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters. */
19454 if (!gimple_seq_empty_p (parm_stmts))
19456 tree parm;
19458 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
19459 if (parm_cleanup)
19461 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
19462 GIMPLE_TRY_FINALLY);
19463 parm_stmts = NULL;
19464 gimple_seq_add_stmt (&parm_stmts, g);
19466 gimple_bind_set_body (outer_bind, parm_stmts);
19468 for (parm = DECL_ARGUMENTS (current_function_decl);
19469 parm; parm = DECL_CHAIN (parm))
19470 if (DECL_HAS_VALUE_EXPR_P (parm))
19472 DECL_HAS_VALUE_EXPR_P (parm) = 0;
19473 DECL_IGNORED_P (parm) = 0;
19477 if ((flag_openacc || flag_openmp || flag_openmp_simd)
19478 && gimplify_omp_ctxp)
19480 delete_omp_context (gimplify_omp_ctxp);
19481 gimplify_omp_ctxp = NULL;
19484 pop_gimplify_context (outer_bind);
19485 gcc_assert (gimplify_ctxp == NULL);
19487 if (flag_checking && !seen_error ())
19488 verify_gimple_in_seq (gimple_bind_body (outer_bind));
19490 timevar_pop (TV_TREE_GIMPLIFY);
19491 input_location = saved_location;
19493 return outer_bind;
19496 typedef char *char_p; /* For DEF_VEC_P. */
19498 /* Return whether we should exclude FNDECL from instrumentation. */
19500 static bool
19501 flag_instrument_functions_exclude_p (tree fndecl)
19503 vec<char_p> *v;
19505 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
19506 if (v && v->length () > 0)
19508 const char *name;
19509 int i;
19510 char *s;
19512 name = lang_hooks.decl_printable_name (fndecl, 1);
19513 FOR_EACH_VEC_ELT (*v, i, s)
19514 if (strstr (name, s) != NULL)
19515 return true;
19518 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
19519 if (v && v->length () > 0)
19521 const char *name;
19522 int i;
19523 char *s;
19525 name = DECL_SOURCE_FILE (fndecl);
19526 FOR_EACH_VEC_ELT (*v, i, s)
19527 if (strstr (name, s) != NULL)
19528 return true;
19531 return false;
19534 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
19535 If COND_VAR is not NULL, it is a boolean variable guarding the call to
19536 the instrumentation function. If STMT is not NULL, it is a statement
19537 to be executed just before the call to the instrumentation function. */
19539 static void
19540 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
19541 tree cond_var, gimple *stmt)
19543 /* The instrumentation hooks aren't going to call the instrumented
19544 function and the address they receive is expected to be matchable
19545 against symbol addresses. Make sure we don't create a trampoline,
19546 in case the current function is nested. */
19547 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
19548 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
19550 tree label_true, label_false;
19551 if (cond_var)
19553 label_true = create_artificial_label (UNKNOWN_LOCATION);
19554 label_false = create_artificial_label (UNKNOWN_LOCATION);
19555 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
19556 label_true, label_false);
19557 gimplify_seq_add_stmt (seq, cond);
19558 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
19559 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
19560 NOT_TAKEN));
19563 if (stmt)
19564 gimplify_seq_add_stmt (seq, stmt);
19566 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
19567 gcall *call = gimple_build_call (x, 1, integer_zero_node);
19568 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
19569 gimple_call_set_lhs (call, tmp_var);
19570 gimplify_seq_add_stmt (seq, call);
19571 x = builtin_decl_implicit (fncode);
19572 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
19573 gimplify_seq_add_stmt (seq, call);
19575 if (cond_var)
19576 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
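/* The emitted sequence is roughly (a sketch)

     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn, return_addr);

   with __cyg_profile_func_exit used for BUILT_IN_PROFILE_FUNC_EXIT,
   all wrapped in the COND_VAR guard when one is supplied.  */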
19579 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
19580 node for the function we want to gimplify.
19582 Return the sequence of GIMPLE statements corresponding to the body
19583 of FNDECL. */
19585 void
19586 gimplify_function_tree (tree fndecl)
19588 gimple_seq seq;
19589 gbind *bind;
19591 gcc_assert (!gimple_body (fndecl));
19593 if (DECL_STRUCT_FUNCTION (fndecl))
19594 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
19595 else
19596 push_struct_function (fndecl);
19598 reset_cond_uid ();
19600 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
19601 if necessary. */
19602 cfun->curr_properties |= PROP_gimple_lva;
19604 if (asan_sanitize_use_after_scope ())
19605 asan_poisoned_variables = new hash_set<tree> ();
19606 bind = gimplify_body (fndecl, true);
19607 if (asan_poisoned_variables)
19609 delete asan_poisoned_variables;
19610 asan_poisoned_variables = NULL;
19613 /* The tree body of the function is no longer needed, replace it
19614 with the new GIMPLE body. */
19615 seq = NULL;
19616 gimple_seq_add_stmt (&seq, bind);
19617 gimple_set_body (fndecl, seq);
19619 /* If we're instrumenting function entry/exit, then prepend the call to
19620 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
19621 catch the exit hook. */
19622 /* ??? Add some way to ignore exceptions for this TFE. */
19623 if (flag_instrument_function_entry_exit
19624 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
19625 /* Do not instrument extern inline functions. */
19626 && !(DECL_DECLARED_INLINE_P (fndecl)
19627 && DECL_EXTERNAL (fndecl)
19628 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
19629 && !flag_instrument_functions_exclude_p (fndecl))
19631 gimple_seq body = NULL, cleanup = NULL;
19632 gassign *assign;
19633 tree cond_var;
19635 /* If -finstrument-functions-once is specified, generate:
19637 static volatile bool C.0 = false;
19638 bool tmp_called;
19640 tmp_called = C.0;
19641 if (!tmp_called)
19643 C.0 = true;
19644 [call profiling enter function]
19647 without specific protection for data races. */
19648 if (flag_instrument_function_entry_exit > 1)
19650 tree first_var
19651 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
19652 VAR_DECL,
19653 create_tmp_var_name ("C"),
19654 boolean_type_node);
19655 DECL_ARTIFICIAL (first_var) = 1;
19656 DECL_IGNORED_P (first_var) = 1;
19657 TREE_STATIC (first_var) = 1;
19658 TREE_THIS_VOLATILE (first_var) = 1;
19659 TREE_USED (first_var) = 1;
19660 DECL_INITIAL (first_var) = boolean_false_node;
19661 varpool_node::add (first_var);
19663 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
19664 assign = gimple_build_assign (cond_var, first_var);
19665 gimplify_seq_add_stmt (&body, assign);
19667 assign = gimple_build_assign (first_var, boolean_true_node);
19670 else
19672 cond_var = NULL_TREE;
19673 assign = NULL;
19676 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
19677 cond_var, assign);
19679 /* If -finstrument-functions-once is specified, generate:
19681 if (!tmp_called)
19682 [call profiling exit function]
19684 without specific protection for data races. */
19685 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
19686 cond_var, NULL);
19688 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
19689 gimplify_seq_add_stmt (&body, tf);
19690 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
19692 /* Replace the current function body with the body
19693 wrapped in the try/finally TF. */
19694 seq = NULL;
19695 gimple_seq_add_stmt (&seq, new_bind);
19696 gimple_set_body (fndecl, seq);
19697 bind = new_bind;
19700 if (sanitize_flags_p (SANITIZE_THREAD)
19701 && param_tsan_instrument_func_entry_exit)
19703 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
19704 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
19705 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
19706 /* Replace the current function body with the body
19707 wrapped in the try/finally TF. */
19708 seq = NULL;
19709 gimple_seq_add_stmt (&seq, new_bind);
19710 gimple_set_body (fndecl, seq);
19713 DECL_SAVED_TREE (fndecl) = NULL_TREE;
19714 cfun->curr_properties |= PROP_gimple_any;
19716 pop_cfun ();
19718 dump_function (TDI_gimple, fndecl);
19721 /* Return a dummy expression of type TYPE in order to keep going after an
19722 error. */
19724 static tree
19725 dummy_object (tree type)
19727 tree t = build_int_cst (build_pointer_type (type), 0);
19728 return build2 (MEM_REF, type, t, t);
19731 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
19732 builtin function, but a very special sort of operator. */
19734 enum gimplify_status
19735 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
19736 gimple_seq *post_p ATTRIBUTE_UNUSED)
19738 tree promoted_type, have_va_type;
19739 tree valist = TREE_OPERAND (*expr_p, 0);
19740 tree type = TREE_TYPE (*expr_p);
19741 tree t, tag, aptag;
19742 location_t loc = EXPR_LOCATION (*expr_p);
19744 /* Verify that valist is of the proper type. */
19745 have_va_type = TREE_TYPE (valist);
19746 if (have_va_type == error_mark_node)
19747 return GS_ERROR;
19748 have_va_type = targetm.canonical_va_list_type (have_va_type);
19749 if (have_va_type == NULL_TREE
19750 && POINTER_TYPE_P (TREE_TYPE (valist)))
19751 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
19752 have_va_type
19753 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
19754 gcc_assert (have_va_type != NULL_TREE);
19756 /* Generate a diagnostic for requesting data of a type that cannot
19757 be passed through `...' due to type promotion at the call site. */
19758 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
19759 != type)
19761 static bool gave_help;
19762 bool warned;
19763 /* Use the expansion point to handle cases such as passing bool (defined
19764 in a system header) through `...'. */
19765 location_t xloc
19766 = expansion_point_location_if_in_system_header (loc);
19768 /* Unfortunately, this is merely undefined, rather than a constraint
19769 violation, so we cannot make this an error. If this call is never
19770 executed, the program is still strictly conforming. */
19771 auto_diagnostic_group d;
19772 warned = warning_at (xloc, 0,
19773 "%qT is promoted to %qT when passed through %<...%>",
19774 type, promoted_type);
19775 if (!gave_help && warned)
19777 gave_help = true;
19778 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
19779 promoted_type, type);
19782 /* We can, however, treat "undefined" any way we please.
19783 Call abort to encourage the user to fix the program. */
19784 if (warned)
19785 inform (xloc, "if this code is reached, the program will abort");
19786 /* Before the abort, allow the evaluation of the va_list
19787 expression to exit or longjmp. */
19788 gimplify_and_add (valist, pre_p);
19789 t = build_call_expr_loc (loc,
19790 builtin_decl_implicit (BUILT_IN_TRAP), 0);
19791 gimplify_and_add (t, pre_p);
19793 /* This is dead code, but go ahead and finish so that the
19794 mode of the result comes out right. */
19795 *expr_p = dummy_object (type);
19796 return GS_ALL_DONE;
19799 tag = build_int_cst (build_pointer_type (type), 0);
19800 aptag = build_int_cst (TREE_TYPE (valist), 0);
19802 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
19803 valist, tag, aptag);
19805 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
19806 needs to be expanded. */
19807 cfun->curr_properties &= ~PROP_gimple_lva;
19809 return GS_OK;
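/* The result is an internal call, printed in dumps roughly as
   VA_ARG applied to the va_list plus the two type tags built above;
   it is expanded into target-specific code later by the stdarg pass,
   which is why PROP_gimple_lva is cleared here.  */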
19812 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
19814 DST/SRC are the destination and source respectively. You can pass
19815 ungimplified trees in DST or SRC, in which case they will be
19816 converted to a gimple operand if necessary.
19818 This function returns the newly created GIMPLE_ASSIGN tuple. */
19820 gimple *
19821 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
19823 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
19824 gimplify_and_add (t, seq_p);
19825 ggc_free (t);
19826 return gimple_seq_last_stmt (*seq_p);
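/* Usage sketch: a caller can build an assignment directly, e.g.

     gimplify_assign (tmp, build2 (PLUS_EXPR, type, a, b), pre_p);

   which appends "tmp = a + b" to *SEQ_P, gimplifying the RHS first
   if needed (the names here are illustrative).  */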
19829 inline hashval_t
19830 gimplify_hasher::hash (const elt_t *p)
19832 tree t = p->val;
19833 return iterative_hash_expr (t, 0);
19836 inline bool
19837 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
19839 tree t1 = p1->val;
19840 tree t2 = p2->val;
19841 enum tree_code code = TREE_CODE (t1);
19843 if (TREE_CODE (t2) != code
19844 || TREE_TYPE (t1) != TREE_TYPE (t2))
19845 return false;
19847 if (!operand_equal_p (t1, t2, 0))
19848 return false;
19850 return true;