/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
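
/* These values are combined as a bitmask per variable in an omp context's
   splay tree.  For instance, a variable explicitly listed in a firstprivate
   clause and then referenced in the region would typically end up carrying
   GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN (example combination for
   illustration only).  */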

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}
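
/* Note that ctx_alloc and ctx_free above recycle contexts through CTX_POOL
   rather than releasing them, so free_gimplify_stack is expected to be
   called once no further gimplification will happen, to return the pooled
   memory to the allocator.  */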

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
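
/* For example, when optimizing, requesting a formal temporary for the
   expression a + b twice in the same context yields the same variable:

     D.1234 = a + b;
     ... D.1234 ... D.1234 ...

   whereas at -O0, or for values with side effects, each request creates a
   fresh temporary.  (The name D.1234 is illustrative.)  */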

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
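
/* As a concrete illustration of the hazard, if a front-end reuses one tree
   node SIZE as an argument of two different calls,

     foo (SIZE);  ...  bar (SIZE);

   gimplifying the first call may rewrite SIZE in place; without unsharing,
   the second call would then see the already-gimplified form instead of the
   original GENERIC expression.  */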

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
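
/* For instance, for a STATEMENT_LIST of the shape

     DEBUG_BEGIN_STMT
     DEBUG_BEGIN_STMT
     expr

   this returns the location of EXPR (recursively), while a list with no
   leading DEBUG_BEGIN_STMT, or with more than one statement after them,
   yields OR_ELSE.  */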

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}


/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
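
/* For example, a GNU statement expression used as an rvalue, roughly

     t = ({ int x = f (); x + 1; });

   is voidified so that the wrapper itself has void type and the value is
   produced by an assignment to a temporary at the last statement:

     ({ int x = f (); retval = x + 1; }), t = retval;

   The exact shape depends on the wrapper; "retval" is the name passed to
   create_tmp_var above, and the rest of the example is illustrative.  */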

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
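
/* The two calls built here appear in the IL as something like

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   with the restore typically placed in the cleanup of a GIMPLE_TRY_FINALLY
   (see gimplify_bind_expr below).  The numbering of the temporary is
   illustrative.  */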

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of a DECL variable.  The call is
   inserted at the position identified by the iterator IT; the BEFORE flag
   selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
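
/* In the gimple dumps the built internal call shows up as, e.g.,

     .ASAN_MARK (POISON, &decl, 8);

   or .ASAN_MARK (UNPOISON, ...), where 8 stands for DECL_SIZE_UNIT of the
   variable (the size shown is illustrative).  */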

/* Generate an IFN_ASAN_MARK internal call that either poisons or unpoisons
   a DECL, depending on the POISON flag.  The created statement is appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES,
   depending on the POISON flag.  The created statements are appended
   to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
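
/* Roughly, the result of gimplifying a BIND_EXPR that needs both a stack
   save/restore and end-of-scope clobbers has the shape

     gimple_bind <vars>
       saved_stack.1 = __builtin_stack_save ();
       try
         {
           <body>
         }
       finally
         {
           __builtin_stack_restore (saved_stack.1);
           var = {CLOBBER(eol)};
         }

   where either part of the cleanup may be absent; the dump syntax above is
   illustrative.  */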

/* Maybe add an early return predict statement to the PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are within a conditional context, add the PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
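
/* For example, for a scalar-returning function the statement

     return a + b;

   is rewritten into something like

     D.1234 = a + b;
     return D.1234;

   where D.1234 is the cached gimplify_ctxp->return_temp shared by all
   returns of the function (the temporary name is illustrative).  */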

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
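
/* For a C99 VLA such as

     int a[n];

   this emits, roughly,

     a.1 = __builtin_alloca_with_align (D.size, align);

   and sets DECL_VALUE_EXPR (a) to *a.1, so later uses of A are rewritten to
   the indirection.  The names and the exact alloca variant shown are
   illustrative.  */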

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
      sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
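
/* With -ftrivial-auto-var-init=zero, an uninitialized

     int x;

   is gimplified to something like

     x = .DEFERRED_INIT (4, 2, &"x"[0]);

   which later passes expand into the actual zero (or pattern) store; the
   constant arguments shown are illustrative.  */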
1791 /* Generate padding initialization for automatic vairable DECL.
1792 C guarantees that brace-init with fewer initializers than members
1793 aggregate will initialize the rest of the aggregate as-if it were
1794 static initialization. In turn static initialization guarantees
1795 that padding is initialized to zero. So, we always initialize paddings
1796 to zeroes regardless INIT_TYPE.
1797 To do the padding initialization, we insert a call to
1798 __builtin_clear_padding (&decl, 0, for_auto_init = true).
1799 Note, we add an additional dummy argument for __builtin_clear_padding,
1800 'for_auto_init' to distinguish whether this call is for automatic
1801 variable initialization or not.
1803 static void
1804 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
1805 gimple_seq *seq_p)
1807 tree addr_of_decl = NULL_TREE;
1808 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
1810 if (is_vla)
1812 /* The temporary address variable for this vla should be
1813 created in gimplify_vla_decl. */
1814 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
1815 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
1816 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
1818 else
1820 mark_addressable (decl);
1821 addr_of_decl = build_fold_addr_expr (decl);
1824 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
1825 build_one_cst (TREE_TYPE (addr_of_decl)));
1826 gimplify_seq_add_stmt (seq_p, call);
1829 /* Return true if the DECL need to be automaticly initialized by the
1830 compiler. */
1831 static bool
1832 is_var_need_auto_init (tree decl)
1834 if (auto_var_p (decl)
1835 && (TREE_CODE (decl) != VAR_DECL
1836 || !DECL_HARD_REGISTER (decl))
1837 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
1838 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
1839 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
1840 && !is_empty_type (TREE_TYPE (decl)))
1841 return true;
1842 return false;
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1851 tree stmt = *stmt_p;
1852 tree decl = DECL_EXPR_DECL (stmt);
1854 *stmt_p = NULL_TREE;
1856 if (TREE_TYPE (decl) == error_mark_node)
1857 return GS_ERROR;
1859 if ((TREE_CODE (decl) == TYPE_DECL
1860 || VAR_P (decl))
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1863 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1864 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1879 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1881 tree init = DECL_INITIAL (decl);
1882 bool is_vla = false;
1883 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1884 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1885 If the decl has VALUE_EXPR that was created by FE (usually
1886 C++FE), it's a proxy varaible, and FE already initialized
1887 the VALUE_EXPR of it, we should not initialize it anymore. */
1888 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1890 poly_uint64 size;
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1892 || (!TREE_STATIC (decl)
1893 && flag_stack_check == GENERIC_STACK_CHECK
1894 && maybe_gt (size,
1895 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1897 gimplify_vla_decl (decl, seq_p);
1898 is_vla = true;
1901 if (asan_poisoned_variables
1902 && !is_vla
1903 && TREE_ADDRESSABLE (decl)
1904 && !TREE_STATIC (decl)
1905 && !DECL_HAS_VALUE_EXPR_P (decl)
1906 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1914 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1916 asan_poisoned_variables->add (decl);
1917 asan_poison_variable (decl, false, seq_p);
1918 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1919 gimplify_ctxp->live_switch_vars->add (decl);
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1927 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1928 gimple_add_tmp_var (decl);
1930 if (init && init != error_mark_node)
1932 if (!TREE_STATIC (decl))
1934 DECL_INITIAL (decl) = NULL_TREE;
1935 init = build2 (INIT_EXPR, void_type_node, decl, init);
1936 gimplify_and_add (init, seq_p);
1937 ggc_free (init);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl)
1940 && !omp_privatize_by_reference (decl))
1941 TREE_READONLY (decl) = 0;
1943 else
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init, force_labels_r, NULL, NULL);
1948 /* When there is no explicit initializer and the user requested it,
1949 we should insert an artificial initializer for this automatic
1950 variable. */
1951 else if (is_var_need_auto_init (decl)
1952 && !decl_had_value_expr_p)
1954 gimple_add_init_for_auto_var (decl,
1955 flag_auto_var_init,
1956 seq_p);
1957 /* Expanding a call to the above .DEFERRED_INIT will apply
1958 block initialization to the whole space covered by this variable.
1959 As a result, all the padding will be initialized to zeroes
1960 for zero initialization and 0xFE byte-repeatable patterns for
1961 pattern initialization.
1962 In order to make the padding zeroes for pattern init, we
1963 should add a call to __builtin_clear_padding to clear the
1964 padding to zero, compatible with Clang.
1965 We cannot insert this call if the variable is a gimple register
1966 since __builtin_clear_padding will take the address of the
1967 variable. As a result, if a long double/_Complex long double
1968 variable is spilled onto the stack later, its padding is 0xFE. */
1969 if (flag_auto_var_init == AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1972 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1976 return GS_ALL_DONE;
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1986 tree saved_label = gimplify_ctxp->exit_label;
1987 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1989 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1991 gimplify_ctxp->exit_label = NULL_TREE;
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1995 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1997 if (gimplify_ctxp->exit_label)
1998 gimplify_seq_add_stmt (pre_p,
1999 gimple_build_label (gimplify_ctxp->exit_label));
2001 gimplify_ctxp->exit_label = saved_label;
2003 *expr_p = NULL;
2004 return GS_ALL_DONE;
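/* An illustrative sketch of the lowering above, with invented label
   names:

     LOOP_EXPR <body>

   becomes

     start_label:
       body
       goto start_label;
     exit_label:

   where exit_label is emitted only if the body contained an
   EXIT_EXPR.  */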
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2010 static enum gimplify_status
2011 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2013 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2015 tree_stmt_iterator i = tsi_start (*expr_p);
2017 while (!tsi_end_p (i))
2019 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2020 tsi_delink (&i);
2023 if (temp)
2025 *expr_p = temp;
2026 return GS_OK;
2029 return GS_ALL_DONE;
2033 /* Emit a warning for the unreachable statement STMT if needed.
2034 Return the gimple statement itself when the warning is emitted,
2035 otherwise return NULL. */
2036 static gimple *
2037 emit_warn_switch_unreachable (gimple *stmt)
2039 if (gimple_code (stmt) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2044 return NULL;
2045 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2046 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2047 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2049 || (is_gimple_assign (stmt)
2050 && gimple_assign_single_p (stmt)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2054 IFN_DEFERRED_INIT))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2057 There are 3 cases:
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding with the 2nd argument
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as its RHS, as follows:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2064 i1 = _1. */
2065 return NULL;
2066 else
2067 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2068 "statement will never be executed");
2069 return stmt;
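/* A sketch of code diagnosed by the warning above:

     switch (x)
       {
	 foo ();   // warning: statement will never be executed
       case 1:
	 break;
       }
   */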
2072 /* Callback for walk_gimple_seq. */
2074 static tree
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2076 bool *handled_ops_p,
2077 struct walk_stmt_info *wi)
2079 gimple *stmt = gsi_stmt (*gsi_p);
2080 bool unreachable_issued = wi->info != NULL;
2082 *handled_ops_p = true;
2083 switch (gimple_code (stmt))
2085 case GIMPLE_TRY:
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt) == NULL)
2091 if (warn_switch_unreachable && !unreachable_issued)
2092 wi->info = emit_warn_switch_unreachable (stmt);
2094 /* Stop when auto var init warning is not on. */
2095 if (!warn_trivial_auto_var_init)
2096 return integer_zero_node;
2098 /* Fall through. */
2099 case GIMPLE_BIND:
2100 case GIMPLE_CATCH:
2101 case GIMPLE_EH_FILTER:
2102 case GIMPLE_TRANSACTION:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p = false;
2105 break;
2107 case GIMPLE_DEBUG:
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2111 break;
2113 case GIMPLE_LABEL:
2114 /* Stop at the first label. */
2115 return integer_zero_node;
2116 case GIMPLE_CALL:
2117 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2119 *handled_ops_p = false;
2120 break;
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2126 /* Get the variable name from the 3rd argument of the call. */
2127 tree var_name = gimple_call_arg (stmt, 2);
2128 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2129 const char *var_name_str = TREE_STRING_POINTER (var_name);
2131 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2132 "%qs cannot be initialized with "
2133 "%<-ftrivial-auto-var-init%>",
2134 var_name_str);
2135 break;
2138 /* Fall through. */
2139 default:
2140 /* Check the first "real" statement (not a decl/lexical scope/...) and
2141 issue a warning if needed. */
2142 if (warn_switch_unreachable && !unreachable_issued)
2143 wi->info = emit_warn_switch_unreachable (stmt);
2144 /* Stop when auto var init warning is not on. */
2145 if (!warn_trivial_auto_var_init)
2146 return integer_zero_node;
2147 break;
2149 return NULL_TREE;
2153 /* Possibly warn about unreachable statements between a switch's
2154 controlling expression and the first case. Also warn when
2155 -ftrivial-auto-var-init cannot initialize an auto variable in
2156 such a situation. SEQ is the body of a switch expression. */
2158 static void
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2161 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2162 /* This warning doesn't play well with Fortran when optimizations
2163 are on. */
2164 || lang_GNU_Fortran ()
2165 || seq == NULL)
2166 return;
2168 struct walk_stmt_info wi;
2170 memset (&wi, 0, sizeof (wi));
2171 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2175 /* A label entry that pairs label and a location. */
2176 struct label_entry
2178 tree label;
2179 location_t loc;
2182 /* Find LABEL in vector of label entries VEC. */
2184 static struct label_entry *
2185 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2187 unsigned int i;
2188 struct label_entry *l;
2190 FOR_EACH_VEC_ELT (*vec, i, l)
2191 if (l->label == label)
2192 return l;
2193 return NULL;
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2199 static bool
2200 case_label_p (const vec<tree> *cases, tree label)
2202 unsigned int i;
2203 tree l;
2205 FOR_EACH_VEC_ELT (*cases, i, l)
2206 if (CASE_LABEL (l) == label)
2207 return true;
2208 return false;
2211 /* Find the last nondebug statement in a scope STMT. */
2213 static gimple *
2214 last_stmt_in_scope (gimple *stmt)
2216 if (!stmt)
2217 return NULL;
2219 switch (gimple_code (stmt))
2221 case GIMPLE_BIND:
2223 gbind *bind = as_a <gbind *> (stmt);
2224 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2225 return last_stmt_in_scope (stmt);
2228 case GIMPLE_TRY:
2230 gtry *try_stmt = as_a <gtry *> (stmt);
2231 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2232 gimple *last_eval = last_stmt_in_scope (stmt);
2233 if (gimple_stmt_may_fallthru (last_eval)
2234 && (last_eval == NULL
2235 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2236 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2238 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2239 return last_stmt_in_scope (stmt);
2241 else
2242 return last_eval;
2245 case GIMPLE_DEBUG:
2246 gcc_unreachable ();
2248 default:
2249 return stmt;
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2258 static gimple *
2259 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2260 auto_vec <struct label_entry> *labels,
2261 location_t *prevloc)
2263 gimple *prev = NULL;
2265 *prevloc = UNKNOWN_LOCATION;
2268 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2274 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2275 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2276 if (last
2277 && gimple_code (first) == GIMPLE_SWITCH
2278 && gimple_code (last) == GIMPLE_LABEL)
2280 tree label = gimple_label_label (as_a <glabel *> (last));
2281 if (SWITCH_BREAK_LABEL_P (label))
2283 prev = bind;
2284 gsi_next (gsi_p);
2285 continue;
2289 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2295 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2296 if (last)
2298 prev = last;
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev))
2302 *prevloc = bind_loc;
2304 gsi_next (gsi_p);
2305 continue;
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2311 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2312 tree false_lab = gimple_cond_false_label (cond_stmt);
2313 location_t if_loc = gimple_location (cond_stmt);
2315 /* If we have e.g.
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab))
2319 break;
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2324 gimple *stmt = gsi_stmt (*gsi_p);
2325 if (gimple_code (stmt) == GIMPLE_LABEL
2326 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2327 break;
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p))
2332 break;
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab))
2337 struct label_entry l = { false_lab, if_loc };
2338 labels->safe_push (l);
2341 /* Go to the last statement of the then branch. */
2342 gsi_prev (gsi_p);
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2345 <D.1759>:
2346 <stmt>;
2347 goto <D.1761>;
2348 <D.1760>:
2350 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p)))
2353 /* Look at the statement before, it might be
2354 attribute fallthrough, in which case don't warn. */
2355 gsi_prev (gsi_p);
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2358 gsi_next (gsi_p);
2359 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2360 if (!fallthru_before_dest)
2362 struct label_entry l = { goto_dest, if_loc };
2363 labels->safe_push (l);
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2368 <D.2022>:
2369 n = n + 1; // #1
2370 <D.2023>: // #2
2371 <D.1988>: // #3
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab))
2375 prev = gsi_stmt (*gsi_p);
2377 /* And move back. */
2378 gsi_next (gsi_p);
2381 /* Remember the last statement. Skip labels that are of no interest
2382 to us. */
2383 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2385 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2386 if (find_label_entry (labels, label))
2387 prev = gsi_stmt (*gsi_p);
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2391 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2394 prev = gsi_stmt (*gsi_p);
2395 gsi_next (gsi_p);
2397 while (!gsi_end_p (*gsi_p)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p))));
2402 if (prev && gimple_has_location (prev))
2403 *prevloc = gimple_location (prev);
2404 return prev;
2407 /* Return true if the switch fallthrough warning should occur. LABEL is
2408 the label statement that we're falling through to. */
2410 static bool
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2413 gimple_stmt_iterator gsi = *gsi_p;
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label))
2417 return false;
2419 /* Don't warn for non-case labels followed by a statement:
2420 case 0:
2421 foo ();
2422 label:
2423 bar ();
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2427 tree l;
2428 while (!gsi_end_p (gsi)
2429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2430 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2431 && !case_label_p (&gimplify_ctxp->case_labels, l))
2432 gsi_next_nondebug (&gsi);
2433 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2434 return false;
2437 /* Don't warn for terminated branches, i.e. when the subsequent case
2438 label immediately breaks. */
2439 gsi = *gsi_p;
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi)
2443 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2445 gsi_next_nondebug (&gsi);
2447 /* { ... something; default:; } */
2448 if (gsi_end_p (gsi)
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2454 return false;
2456 return true;
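/* A sketch of code the warning is meant for:

     switch (x)
       {
       case 1:
	 foo ();   // warning: this statement may fall through
       case 2:	   // note: here
	 bar ();
	 break;
       }

   A "falls through" comment or the fallthrough attribute before
   "case 2" suppresses the warning.  */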
2459 /* Callback for walk_gimple_seq. */
2461 static tree
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2463 struct walk_stmt_info *)
2465 gimple *stmt = gsi_stmt (*gsi_p);
2467 *handled_ops_p = true;
2468 switch (gimple_code (stmt))
2470 case GIMPLE_TRY:
2471 case GIMPLE_BIND:
2472 case GIMPLE_CATCH:
2473 case GIMPLE_EH_FILTER:
2474 case GIMPLE_TRANSACTION:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p = false;
2477 break;
2479 /* Find a sequence of form:
2481 GIMPLE_LABEL
2482 [...]
2483 <may fallthru stmt>
2484 GIMPLE_LABEL
2486 and possibly warn. */
2487 case GIMPLE_LABEL:
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p)
2491 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2492 gsi_next_nondebug (gsi_p);
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p))
2496 return integer_zero_node;
2498 /* Vector of labels that fall through. */
2499 auto_vec <struct label_entry> labels;
2500 location_t prevloc;
2501 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p))
2505 return integer_zero_node;
2507 gimple *next = gsi_stmt (*gsi_p);
2508 tree label;
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next) == GIMPLE_LABEL
2511 && gimple_has_location (next)
2512 && (label = gimple_label_label (as_a <glabel *> (next)))
2513 && prev != NULL)
2515 struct label_entry *l;
2516 bool warned_p = false;
2517 auto_diagnostic_group d;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2519 /* Quiet. */;
2520 else if (gimple_code (prev) == GIMPLE_LABEL
2521 && (label = gimple_label_label (as_a <glabel *> (prev)))
2522 && (l = find_label_entry (&labels, label)))
2523 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev)
2529 && prevloc != UNKNOWN_LOCATION)
2530 warned_p = warning_at (prevloc,
2531 OPT_Wimplicit_fallthrough_,
2532 "this statement may fall through");
2533 if (warned_p)
2534 inform (gimple_location (next), "here");
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label) = true;
2540 /* So that next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2542 gsi_prev (gsi_p);
2545 break;
2546 default:
2547 break;
2549 return NULL_TREE;
2552 /* Warn when a switch case falls through. */
2554 static void
2555 maybe_warn_implicit_fallthrough (gimple_seq seq)
2557 if (!warn_implicit_fallthrough)
2558 return;
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2561 if (!(lang_GNU_C ()
2562 || lang_GNU_CXX ()
2563 || lang_GNU_OBJC ()))
2564 return;
2566 struct walk_stmt_info wi;
2567 memset (&wi, 0, sizeof (wi));
2568 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2571 /* Callback for walk_gimple_seq. */
2573 static tree
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2575 struct walk_stmt_info *wi)
2577 gimple *stmt = gsi_stmt (*gsi_p);
2579 *handled_ops_p = true;
2580 switch (gimple_code (stmt))
2582 case GIMPLE_TRY:
2583 case GIMPLE_BIND:
2584 case GIMPLE_CATCH:
2585 case GIMPLE_EH_FILTER:
2586 case GIMPLE_TRANSACTION:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p = false;
2589 break;
2590 case GIMPLE_CALL:
2591 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2593 gsi_remove (gsi_p, true);
2594 if (gsi_end_p (*gsi_p))
2596 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2597 return integer_zero_node;
2600 bool found = false;
2601 location_t loc = gimple_location (stmt);
2603 gimple_stmt_iterator gsi2 = *gsi_p;
2604 stmt = gsi_stmt (gsi2);
2605 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2607 /* Go on until the artificial label. */
2608 tree goto_dest = gimple_goto_dest (stmt);
2609 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2611 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2613 == goto_dest)
2614 break;
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2))
2619 break;
2621 /* Look one past it. */
2622 gsi_next (&gsi2);
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2))
2628 stmt = gsi_stmt (gsi2);
2629 if (gimple_code (stmt) == GIMPLE_LABEL)
2631 tree label = gimple_label_label (as_a <glabel *> (stmt));
2632 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2634 found = true;
2635 break;
2638 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2640 else if (!is_gimple_debug (stmt))
2641 /* Anything else is not expected. */
2642 break;
2643 gsi_next (&gsi2);
2645 if (!found)
2646 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2649 break;
2650 default:
2651 break;
2653 return NULL_TREE;
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2658 static void
2659 expand_FALLTHROUGH (gimple_seq *seq_p)
2661 struct walk_stmt_info wi;
2662 location_t loc;
2663 memset (&wi, 0, sizeof (wi));
2664 wi.info = (void *) &loc;
2665 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2666 if (wi.callback_result == integer_zero_node)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2675 branch to. */
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2680 tree switch_expr = *expr_p;
2681 gimple_seq switch_body_seq = NULL;
2682 enum gimplify_status ret;
2683 tree index_type = TREE_TYPE (switch_expr);
2684 if (index_type == NULL_TREE)
2685 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2687 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2688 fb_rvalue);
2689 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2690 return ret;
2692 if (SWITCH_BODY (switch_expr))
2694 vec<tree> labels;
2695 vec<tree> saved_labels;
2696 hash_set<tree> *saved_live_switch_vars = NULL;
2697 tree default_case = NULL_TREE;
2698 gswitch *switch_stmt;
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels = gimplify_ctxp->case_labels;
2703 gimplify_ctxp->case_labels.create (8);
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2707 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2708 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2709 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2710 else
2711 gimplify_ctxp->live_switch_vars = NULL;
2713 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2714 gimplify_ctxp->in_switch_expr = true;
2716 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2718 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2720 maybe_warn_implicit_fallthrough (switch_body_seq);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp->in_switch_expr)
2723 expand_FALLTHROUGH (&switch_body_seq);
2725 labels = gimplify_ctxp->case_labels;
2726 gimplify_ctxp->case_labels = saved_labels;
2728 if (gimplify_ctxp->live_switch_vars)
2730 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2731 delete gimplify_ctxp->live_switch_vars;
2733 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2735 preprocess_case_label_vec_for_gimple (labels, index_type,
2736 &default_case);
2738 bool add_bind = false;
2739 if (!default_case)
2741 glabel *new_default;
2743 default_case
2744 = build_case_label (NULL_TREE, NULL_TREE,
2745 create_artificial_label (UNKNOWN_LOCATION));
2746 if (old_in_switch_expr)
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2749 add_bind = true;
2751 new_default = gimple_build_label (CASE_LABEL (default_case));
2752 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2754 else if (old_in_switch_expr)
2756 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2757 if (last && gimple_code (last) == GIMPLE_LABEL)
2759 tree label = gimple_label_label (as_a <glabel *> (last));
2760 if (SWITCH_BREAK_LABEL_P (label))
2761 add_bind = true;
2765 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2766 default_case, labels);
2767 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2768 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2769 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2770 so that we can easily find the start and end of the switch
2771 statement. */
2772 if (add_bind)
2774 gimple_seq bind_body = NULL;
2775 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2776 gimple_seq_add_seq (&bind_body, switch_body_seq);
2777 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2778 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2779 gimplify_seq_add_stmt (pre_p, bind);
2781 else
2783 gimplify_seq_add_stmt (pre_p, switch_stmt);
2784 gimplify_seq_add_seq (pre_p, switch_body_seq);
2786 labels.release ();
2788 else
2789 gcc_unreachable ();
2791 return GS_ALL_DONE;
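/* A rough sketch of the result for

     switch (x) { case 1: foo (); break; default: break; }

   is, with invented label names,

     switch (x) <default: D.3, case 1: D.1>
     D.1:
     foo ();
     goto D.2;	// the break label
     D.3:
     D.2:

   When this SWITCH_EXPR is itself nested in another one, the
   GIMPLE_SWITCH and its body up to the SWITCH_BREAK_LABEL_P label are
   additionally wrapped in a GIMPLE_BIND, as explained above.  */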
2794 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2796 static enum gimplify_status
2797 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2799 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2800 == current_function_decl);
2802 tree label = LABEL_EXPR_LABEL (*expr_p);
2803 glabel *label_stmt = gimple_build_label (label);
2804 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2805 gimplify_seq_add_stmt (pre_p, label_stmt);
2807 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2808 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2809 NOT_TAKEN));
2810 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2811 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2812 TAKEN));
2814 return GS_ALL_DONE;
2817 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2819 static enum gimplify_status
2820 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2822 struct gimplify_ctx *ctxp;
2823 glabel *label_stmt;
2825 /* Invalid programs can play Duff's Device type games with, for example,
2826 #pragma omp parallel. At least in the C front end, we don't
2827 detect such invalid branches until after gimplification, in the
2828 diagnose_omp_blocks pass. */
2829 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2830 if (ctxp->case_labels.exists ())
2831 break;
2833 tree label = CASE_LABEL (*expr_p);
2834 label_stmt = gimple_build_label (label);
2835 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2836 ctxp->case_labels.safe_push (*expr_p);
2837 gimplify_seq_add_stmt (pre_p, label_stmt);
2839 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2840 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2841 NOT_TAKEN));
2842 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2843 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2844 TAKEN));
2846 return GS_ALL_DONE;
2849 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2850 if necessary. */
2852 tree
2853 build_and_jump (tree *label_p)
2855 if (label_p == NULL)
2856 /* If there's nowhere to jump, just fall through. */
2857 return NULL_TREE;
2859 if (*label_p == NULL_TREE)
2861 tree label = create_artificial_label (UNKNOWN_LOCATION);
2862 *label_p = label;
2865 return build1 (GOTO_EXPR, void_type_node, *label_p);
2868 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2869 This also involves building a label to jump to and communicating it to
2870 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2872 static enum gimplify_status
2873 gimplify_exit_expr (tree *expr_p)
2875 tree cond = TREE_OPERAND (*expr_p, 0);
2876 tree expr;
2878 expr = build_and_jump (&gimplify_ctxp->exit_label);
2879 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2880 *expr_p = expr;
2882 return GS_OK;
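/* A sketch: EXIT_EXPR <cond> becomes

     if (cond) goto exit_label;

   where exit_label is stored in gimplify_ctxp->exit_label so that
   gimplify_loop_expr can emit it after the loop body.  */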
2885 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2886 different from its canonical type, wrap the whole thing inside a
2887 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2888 type.
2890 The canonical type of a COMPONENT_REF is the type of the field being
2891 referenced--unless the field is a bit-field which can be read directly
2892 in a smaller mode, in which case the canonical type is the
2893 sign-appropriate type corresponding to that mode. */
2895 static void
2896 canonicalize_component_ref (tree *expr_p)
2898 tree expr = *expr_p;
2899 tree type;
2901 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2903 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2904 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2905 else
2906 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2908 /* One could argue that all the stuff below is not necessary for
2909 the non-bitfield case and declare it a FE error if type
2910 adjustment would be needed. */
2911 if (TREE_TYPE (expr) != type)
2913 #ifdef ENABLE_TYPES_CHECKING
2914 tree old_type = TREE_TYPE (expr);
2915 #endif
2916 int type_quals;
2918 /* We need to preserve qualifiers and propagate them from
2919 operand 0. */
2920 type_quals = TYPE_QUALS (type)
2921 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2922 if (TYPE_QUALS (type) != type_quals)
2923 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2925 /* Set the type of the COMPONENT_REF to the underlying type. */
2926 TREE_TYPE (expr) = type;
2928 #ifdef ENABLE_TYPES_CHECKING
2929 /* It is now a FE error if the conversion from the canonical
2930 type to the original expression type is not useless. */
2931 gcc_assert (useless_type_conversion_p (old_type, type));
2932 #endif
2936 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2937 to foo, embed that change in the ADDR_EXPR by converting
2938 T array[U];
2939 (T *)&array
2941 &array[L]
2942 where L is the lower bound. For simplicity, only do this for constant
2943 lower bound.
2944 The constraint is that the type of &array[L] is trivially convertible
2945 to T *. */
2947 static void
2948 canonicalize_addr_expr (tree *expr_p)
2950 tree expr = *expr_p;
2951 tree addr_expr = TREE_OPERAND (expr, 0);
2952 tree datype, ddatype, pddatype;
2954 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2955 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2956 || TREE_CODE (addr_expr) != ADDR_EXPR)
2957 return;
2959 /* The addr_expr type should be a pointer to an array. */
2960 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2961 if (TREE_CODE (datype) != ARRAY_TYPE)
2962 return;
2964 /* The pointer to element type shall be trivially convertible to
2965 the expression pointer type. */
2966 ddatype = TREE_TYPE (datype);
2967 pddatype = build_pointer_type (ddatype);
2968 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2969 pddatype))
2970 return;
2972 /* The lower bound and element sizes must be constant. */
2973 if (!TYPE_SIZE_UNIT (ddatype)
2974 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2975 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2976 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2977 return;
2979 /* All checks succeeded. Build a new node to merge the cast. */
2980 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2982 NULL_TREE, NULL_TREE);
2983 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2985 /* We can have stripped a required restrict qualifier above. */
2986 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2987 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2990 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2991 underneath as appropriate. */
2993 static enum gimplify_status
2994 gimplify_conversion (tree *expr_p)
2996 location_t loc = EXPR_LOCATION (*expr_p);
2997 gcc_assert (CONVERT_EXPR_P (*expr_p));
2999 /* Then strip away all but the outermost conversion. */
3000 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3002 /* And remove the outermost conversion if it's useless. */
3003 if (tree_ssa_useless_type_conversion (*expr_p))
3004 *expr_p = TREE_OPERAND (*expr_p, 0);
3006 /* If we still have a conversion at the toplevel,
3007 then canonicalize some constructs. */
3008 if (CONVERT_EXPR_P (*expr_p))
3010 tree sub = TREE_OPERAND (*expr_p, 0);
3012 /* If a NOP conversion is changing the type of a COMPONENT_REF
3013 expression, then canonicalize its type now in order to expose more
3014 redundant conversions. */
3015 if (TREE_CODE (sub) == COMPONENT_REF)
3016 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3018 /* If a NOP conversion is changing a pointer to array of foo
3019 to a pointer to foo, embed that change in the ADDR_EXPR. */
3020 else if (TREE_CODE (sub) == ADDR_EXPR)
3021 canonicalize_addr_expr (expr_p);
3024 /* If we have a conversion to a non-register type force the
3025 use of a VIEW_CONVERT_EXPR instead. */
3026 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3027 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3028 TREE_OPERAND (*expr_p, 0));
3030 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3031 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3032 TREE_SET_CODE (*expr_p, NOP_EXPR);
3034 return GS_OK;
3037 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3038 DECL_VALUE_EXPR, and it's worth re-examining things. */
3040 static enum gimplify_status
3041 gimplify_var_or_parm_decl (tree *expr_p)
3043 tree decl = *expr_p;
3045 /* ??? If this is a local variable, and it has not been seen in any
3046 outer BIND_EXPR, then it's probably the result of a duplicate
3047 declaration, for which we've already issued an error. It would
3048 be really nice if the front end wouldn't leak these at all.
3049 Currently the only known culprit is C++ destructors, as seen
3050 in g++.old-deja/g++.jason/binding.C.
3051 Another possible culprit is size expressions for variably modified
3052 types which are lost in the FE or not gimplified correctly. */
3053 if (VAR_P (decl)
3054 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3055 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3056 && decl_function_context (decl) == current_function_decl)
3058 gcc_assert (seen_error ());
3059 return GS_ERROR;
3062 /* When within an OMP context, notice uses of variables. */
3063 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3064 return GS_ALL_DONE;
3066 /* If the decl is an alias for another expression, substitute it now. */
3067 if (DECL_HAS_VALUE_EXPR_P (decl))
3069 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3070 return GS_OK;
3073 return GS_ALL_DONE;
3076 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3078 static void
3079 recalculate_side_effects (tree t)
3081 enum tree_code code = TREE_CODE (t);
3082 int len = TREE_OPERAND_LENGTH (t);
3083 int i;
3085 switch (TREE_CODE_CLASS (code))
3087 case tcc_expression:
3088 switch (code)
3090 case INIT_EXPR:
3091 case MODIFY_EXPR:
3092 case VA_ARG_EXPR:
3093 case PREDECREMENT_EXPR:
3094 case PREINCREMENT_EXPR:
3095 case POSTDECREMENT_EXPR:
3096 case POSTINCREMENT_EXPR:
3097 /* All of these have side-effects, no matter what their
3098 operands are. */
3099 return;
3101 default:
3102 break;
3104 /* Fall through. */
3106 case tcc_comparison: /* a comparison expression */
3107 case tcc_unary: /* a unary arithmetic expression */
3108 case tcc_binary: /* a binary arithmetic expression */
3109 case tcc_reference: /* a reference */
3110 case tcc_vl_exp: /* a function call */
3111 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3112 for (i = 0; i < len; ++i)
3114 tree op = TREE_OPERAND (t, i);
3115 if (op && TREE_SIDE_EFFECTS (op))
3116 TREE_SIDE_EFFECTS (t) = 1;
3118 break;
3120 case tcc_constant:
3121 /* No side-effects. */
3122 return;
3124 default:
3125 gcc_unreachable ();
3129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3130 node *EXPR_P.
3132 compound_lval
3133 : min_lval '[' val ']'
3134 | min_lval '.' ID
3135 | compound_lval '[' val ']'
3136 | compound_lval '.' ID
3138 This is not part of the original SIMPLE definition, which separates
3139 array and member references, but it seems reasonable to handle them
3140 together. Also, this way we don't run into problems with union
3141 aliasing; gcc requires that for accesses through a union to alias, the
3142 union reference must be explicit, which was not always the case when we
3143 were splitting up array and member refs.
3145 PRE_P points to the sequence where side effects that must happen before
3146 *EXPR_P should be stored.
3148 POST_P points to the sequence where side effects that must happen after
3149 *EXPR_P should be stored. */
3151 static enum gimplify_status
3152 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3153 fallback_t fallback)
3155 tree *p;
3156 enum gimplify_status ret = GS_ALL_DONE, tret;
3157 int i;
3158 location_t loc = EXPR_LOCATION (*expr_p);
3159 tree expr = *expr_p;
3161 /* Create a stack of the subexpressions so later we can walk them in
3162 order from inner to outer. */
3163 auto_vec<tree, 10> expr_stack;
3165 /* We can handle anything that get_inner_reference can deal with. */
3166 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3168 restart:
3169 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3170 if (TREE_CODE (*p) == INDIRECT_REF)
3171 *p = fold_indirect_ref_loc (loc, *p);
3173 if (handled_component_p (*p))
3175 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3176 additional COMPONENT_REFs. */
3177 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3178 && gimplify_var_or_parm_decl (p) == GS_OK)
3179 goto restart;
3180 else
3181 break;
3183 expr_stack.safe_push (*p);
3186 gcc_assert (expr_stack.length ());
3188 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3189 walked through and P points to the innermost expression.
3191 Java requires that we elaborate nodes in source order. That
3192 means we must gimplify the inner expression followed by each of
3193 the indices, in order. But we can't gimplify the inner
3194 expression until we deal with any variable bounds, sizes, or
3195 positions in order to deal with PLACEHOLDER_EXPRs.
3197 The base expression may contain a statement expression that
3198 has declarations used in size expressions, so has to be
3199 gimplified before gimplifying the size expressions.
3201 So we do this in three steps. First we deal with variable
3202 bounds, sizes, and positions, then we gimplify the base and
3203 ensure it is memory if needed, then we deal with the annotations
3204 for any variables in the components and any indices, from left
3205 to right. */
3207 bool need_non_reg = false;
3208 for (i = expr_stack.length () - 1; i >= 0; i--)
3210 tree t = expr_stack[i];
3212 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3214 /* Deal with the low bound and element type size and put them into
3215 the ARRAY_REF. If these values are set, they have already been
3216 gimplified. */
3217 if (TREE_OPERAND (t, 2) == NULL_TREE)
3219 tree low = unshare_expr (array_ref_low_bound (t));
3220 if (!is_gimple_min_invariant (low))
3222 TREE_OPERAND (t, 2) = low;
3226 if (TREE_OPERAND (t, 3) == NULL_TREE)
3228 tree elmt_size = array_ref_element_size (t);
3229 if (!is_gimple_min_invariant (elmt_size))
3231 elmt_size = unshare_expr (elmt_size);
3232 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3233 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3235 /* Divide the element size by the alignment of the element
3236 type (above). */
3237 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3238 elmt_size, factor);
3240 TREE_OPERAND (t, 3) = elmt_size;
3243 need_non_reg = true;
3245 else if (TREE_CODE (t) == COMPONENT_REF)
3247 /* Set the field offset into T and gimplify it. */
3248 if (TREE_OPERAND (t, 2) == NULL_TREE)
3250 tree offset = component_ref_field_offset (t);
3251 if (!is_gimple_min_invariant (offset))
3253 offset = unshare_expr (offset);
3254 tree field = TREE_OPERAND (t, 1);
3255 tree factor
3256 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3258 /* Divide the offset by its alignment. */
3259 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3260 offset, factor);
3262 TREE_OPERAND (t, 2) = offset;
3265 need_non_reg = true;
3269 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3270 so as to match the min_lval predicate. Failure to do so may result
3271 in the creation of large aggregate temporaries. */
3272 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3273 fallback | fb_lvalue);
3274 ret = MIN (ret, tret);
3275 if (ret == GS_ERROR)
3276 return GS_ERROR;
3278 /* Step 2a: if we have component references we do not support on
3279 registers then make sure the base isn't a register. Of course
3280 we can only do so if an rvalue is OK. */
3281 if (need_non_reg && (fallback & fb_rvalue))
3282 prepare_gimple_addressable (p, pre_p);
3284 /* Step 3: gimplify size expressions and the indices and operands of
3285 ARRAY_REF. During this loop we also remove any useless conversions. */
3287 for (; expr_stack.length () > 0; )
3289 tree t = expr_stack.pop ();
3291 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3293 /* Gimplify the low bound and element type size. */
3294 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3295 is_gimple_reg, fb_rvalue);
3296 ret = MIN (ret, tret);
3298 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3299 is_gimple_reg, fb_rvalue);
3300 ret = MIN (ret, tret);
3302 /* Gimplify the dimension. */
3303 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3304 is_gimple_val, fb_rvalue);
3305 ret = MIN (ret, tret);
3307 else if (TREE_CODE (t) == COMPONENT_REF)
3309 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3310 is_gimple_reg, fb_rvalue);
3311 ret = MIN (ret, tret);
3314 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3316 /* The innermost expression P may have originally had
3317 TREE_SIDE_EFFECTS set which would have caused all the outer
3318 expressions in *EXPR_P leading to P to also have had
3319 TREE_SIDE_EFFECTS set. */
3320 recalculate_side_effects (t);
3323 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3324 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3326 canonicalize_component_ref (expr_p);
3329 expr_stack.release ();
3331 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3333 return ret;
3336 /* Gimplify the self modifying expression pointed to by EXPR_P
3337 (++, --, +=, -=).
3339 PRE_P points to the list where side effects that must happen before
3340 *EXPR_P should be stored.
3342 POST_P points to the list where side effects that must happen after
3343 *EXPR_P should be stored.
3345 WANT_VALUE is nonzero iff we want to use the value of this expression
3346 in another expression.
3348 ARITH_TYPE is the type the computation should be performed in. */
3350 enum gimplify_status
3351 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3352 bool want_value, tree arith_type)
3354 enum tree_code code;
3355 tree lhs, lvalue, rhs, t1;
3356 gimple_seq post = NULL, *orig_post_p = post_p;
3357 bool postfix;
3358 enum tree_code arith_code;
3359 enum gimplify_status ret;
3360 location_t loc = EXPR_LOCATION (*expr_p);
3362 code = TREE_CODE (*expr_p);
3364 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3365 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3367 /* Prefix or postfix? */
3368 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3369 /* Faster to treat as prefix if result is not used. */
3370 postfix = want_value;
3371 else
3372 postfix = false;
3374 /* For postfix, make sure the inner expression's post side effects
3375 are executed after side effects from this expression. */
3376 if (postfix)
3377 post_p = &post;
3379 /* Add or subtract? */
3380 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3381 arith_code = PLUS_EXPR;
3382 else
3383 arith_code = MINUS_EXPR;
3385 /* Gimplify the LHS into a GIMPLE lvalue. */
3386 lvalue = TREE_OPERAND (*expr_p, 0);
3387 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3388 if (ret == GS_ERROR)
3389 return ret;
3391 /* Extract the operands to the arithmetic operation. */
3392 lhs = lvalue;
3393 rhs = TREE_OPERAND (*expr_p, 1);
3395 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3396 that as the result value and in the postqueue operation. */
3397 if (postfix)
3399 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3400 if (ret == GS_ERROR)
3401 return ret;
3403 lhs = get_initialized_tmp_var (lhs, pre_p);
3406 /* For pointer increment, use POINTER_PLUS_EXPR. */
3407 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3409 rhs = convert_to_ptrofftype_loc (loc, rhs);
3410 if (arith_code == MINUS_EXPR)
3411 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3412 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3414 else
3415 t1 = fold_convert (TREE_TYPE (*expr_p),
3416 fold_build2 (arith_code, arith_type,
3417 fold_convert (arith_type, lhs),
3418 fold_convert (arith_type, rhs)));
3420 if (postfix)
3422 gimplify_assign (lvalue, t1, pre_p);
3423 gimplify_seq_add_seq (orig_post_p, post);
3424 *expr_p = lhs;
3425 return GS_ALL_DONE;
3427 else
3429 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3430 return GS_OK;
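/* A sketch of the postfix case when the value is used, with an
   invented temporary name:

     x = i++;

   gimplifies to roughly

     i.0 = i;
     i = i.0 + 1;
     x = i.0;

   For the prefix case no temporary is needed and we simply build
   i = i + 1.  */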
3434 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3436 static void
3437 maybe_with_size_expr (tree *expr_p)
3439 tree expr = *expr_p;
3440 tree type = TREE_TYPE (expr);
3441 tree size;
3443 /* If we've already wrapped this or the type is error_mark_node, we can't do
3444 anything. */
3445 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3446 || type == error_mark_node)
3447 return;
3449 /* If the size isn't known or is a constant, we have nothing to do. */
3450 size = TYPE_SIZE_UNIT (type);
3451 if (!size || poly_int_tree_p (size))
3452 return;
3454 /* Otherwise, make a WITH_SIZE_EXPR. */
3455 size = unshare_expr (size);
3456 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3457 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
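/* E.g. for an object of variably modified type char[n], this wraps
   the expression as (a sketch)

     WITH_SIZE_EXPR <expr, n>

   so that later passes know the number of bytes involved; the size
   operand is whatever TYPE_SIZE_UNIT contains after placeholder
   substitution.  */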
3460 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3461 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3462 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3463 gimplified to an SSA name. */
3465 enum gimplify_status
3466 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3467 bool allow_ssa)
3469 bool (*test) (tree);
3470 fallback_t fb;
3472 /* In general, we allow lvalues for function arguments to avoid
3473 extra overhead of copying large aggregates out of even larger
3474 aggregates into temporaries only to copy the temporaries to
3475 the argument list. Make optimizers happy by pulling out to
3476 temporaries those types that fit in registers. */
3477 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3478 test = is_gimple_val, fb = fb_rvalue;
3479 else
3481 test = is_gimple_lvalue, fb = fb_either;
3482 /* Also strip a TARGET_EXPR that would force an extra copy. */
3483 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3485 tree init = TARGET_EXPR_INITIAL (*arg_p);
3486 if (init
3487 && !VOID_TYPE_P (TREE_TYPE (init)))
3488 *arg_p = init;
3492 /* If this is a variable sized type, we must remember the size. */
3493 maybe_with_size_expr (arg_p);
3495 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3496 /* Make sure arguments have the same location as the function call
3497 itself. */
3498 protected_set_expr_location (*arg_p, call_location);
3500 /* There is a sequence point before a function call. Side effects in
3501 the argument list must occur before the actual call. So, when
3502 gimplifying arguments, force gimplify_expr to use an internal
3503 post queue which is then appended to the end of PRE_P. */
3504 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3507 /* Don't fold inside offloading or taskreg regions: it can break code by
3508 adding decl references that weren't in the source. We'll do it during
3509 omplower pass instead. */
3511 static bool
3512 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3514 struct gimplify_omp_ctx *ctx;
3515 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3516 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3517 return false;
3518 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3519 return false;
3520 /* Delay folding of builtins until the IL is in consistent state
3521 so the diagnostic machinery can do a better job. */
3522 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3523 return false;
3524 return fold_stmt (gsi);
3527 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3528 WANT_VALUE is true if the result of the call is desired. */
3530 static enum gimplify_status
3531 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3533 tree fndecl, parms, p, fnptrtype;
3534 enum gimplify_status ret;
3535 int i, nargs;
3536 gcall *call;
3537 bool builtin_va_start_p = false;
3538 location_t loc = EXPR_LOCATION (*expr_p);
3540 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3542 /* For reliable diagnostics during inlining, it is necessary that
3543 every call_expr be annotated with file and line. */
3544 if (! EXPR_HAS_LOCATION (*expr_p))
3545 SET_EXPR_LOCATION (*expr_p, input_location);
3547 /* Gimplify internal functions created in the FEs. */
3548 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3550 if (want_value)
3551 return GS_ALL_DONE;
3553 nargs = call_expr_nargs (*expr_p);
3554 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3555 auto_vec<tree> vargs (nargs);
3557 if (ifn == IFN_ASSUME)
3559 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3561 /* If the [[assume (cond)]]; condition is simple
3562 enough and can be evaluated unconditionally
3563 without side-effects, expand it as
3564 if (!cond) __builtin_unreachable (); */
3565 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3566 *expr_p = build3 (COND_EXPR, void_type_node,
3567 CALL_EXPR_ARG (*expr_p, 0), void_node,
3568 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3569 fndecl, 0));
3570 return GS_OK;
3572 /* If not optimizing, ignore the assumptions. */
3573 if (!optimize || seen_error ())
3575 *expr_p = NULL_TREE;
3576 return GS_ALL_DONE;
3578 /* Temporarily, until gimple lowering, transform
3579 .ASSUME (cond);
3580 into:
3581 [[assume (guard)]]
3583 guard = cond;
3585 such that gimple lowering can outline the condition into
3586 a separate function easily. */
3587 tree guard = create_tmp_var (boolean_type_node);
3588 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3589 gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
3590 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3591 push_gimplify_context ();
3592 gimple_seq body = NULL;
3593 gimple *g = gimplify_and_return_first (*expr_p, &body);
3594 pop_gimplify_context (g);
3595 g = gimple_build_assume (guard, body);
3596 gimple_set_location (g, loc);
3597 gimplify_seq_add_stmt (pre_p, g);
3598 *expr_p = NULL_TREE;
3599 return GS_ALL_DONE;
3602 for (i = 0; i < nargs; i++)
3604 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3605 EXPR_LOCATION (*expr_p));
3606 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3609 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3610 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3611 gimplify_seq_add_stmt (pre_p, call);
3612 return GS_ALL_DONE;
3615 /* This may be a call to a builtin function.
3617 Builtin function calls may be transformed into different
3618 (and more efficient) builtin function calls under certain
3619 circumstances. Unfortunately, gimplification can muck things
3620 up enough that the builtin expanders are not aware that certain
3621 transformations are still valid.
3623 So we attempt transformation/gimplification of the call before
3624 we gimplify the CALL_EXPR. At this time we do not manage to
3625 transform all calls in the same manner as the expanders do, but
3626 we do transform most of them. */
3627 fndecl = get_callee_fndecl (*expr_p);
3628 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3629 switch (DECL_FUNCTION_CODE (fndecl))
3631 CASE_BUILT_IN_ALLOCA:
3632 /* If the call has been built for a variable-sized object, then we
3633 want to restore the stack level when the enclosing BIND_EXPR is
3634 exited to reclaim the allocated space; otherwise, we precisely
3635 need to do the opposite and preserve the latest stack level. */
3636 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3637 gimplify_ctxp->save_stack = true;
3638 else
3639 gimplify_ctxp->keep_stack = true;
3640 break;
3642 case BUILT_IN_VA_START:
3644 builtin_va_start_p = TRUE;
3645 if (call_expr_nargs (*expr_p) < 2)
3647 error ("too few arguments to function %<va_start%>");
3648 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3649 return GS_OK;
3652 if (fold_builtin_next_arg (*expr_p, true))
3654 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3655 return GS_OK;
3657 break;
3660 case BUILT_IN_EH_RETURN:
3661 cfun->calls_eh_return = true;
3662 break;
3664 case BUILT_IN_CLEAR_PADDING:
3665 if (call_expr_nargs (*expr_p) == 1)
3667 /* Remember the original type of the argument in an internal
3668 dummy second argument, as in GIMPLE pointer conversions are
3669 useless. Also mark this call as not for automatic
3670 initialization in the internal dummy third argument. */
3671 p = CALL_EXPR_ARG (*expr_p, 0);
3672 *expr_p
3673 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3674 build_zero_cst (TREE_TYPE (p)));
3675 return GS_OK;
3677 break;
3679 default:
3682 if (fndecl && fndecl_built_in_p (fndecl))
3684 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3685 if (new_tree && new_tree != *expr_p)
3687 /* There was a transformation of this call which computes the
3688 same value, but in a more efficient way. Return and try
3689 again. */
3690 *expr_p = new_tree;
3691 return GS_OK;
3695 /* Remember the original function pointer type. */
3696 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3698 if (flag_openmp
3699 && fndecl
3700 && cfun
3701 && (cfun->curr_properties & PROP_gimple_any) == 0)
3703 tree variant = omp_resolve_declare_variant (fndecl);
3704 if (variant != fndecl)
3705 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3708 /* There is a sequence point before the call, so any side effects in
3709 the calling expression must occur before the actual call. Force
3710 gimplify_expr to use an internal post queue. */
3711 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3712 is_gimple_call_addr, fb_rvalue);
3714 if (ret == GS_ERROR)
3715 return GS_ERROR;
3717 nargs = call_expr_nargs (*expr_p);
3719 /* Get argument types for verification. */
3720 fndecl = get_callee_fndecl (*expr_p);
3721 parms = NULL_TREE;
3722 if (fndecl)
3723 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3724 else
3725 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3727 if (fndecl && DECL_ARGUMENTS (fndecl))
3728 p = DECL_ARGUMENTS (fndecl);
3729 else if (parms)
3730 p = parms;
3731 else
3732 p = NULL_TREE;
3733 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3736 /* If the last argument is __builtin_va_arg_pack () and it is not
3737 passed as a named argument, decrease the number of CALL_EXPR
3738 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3739 if (!p
3740 && i < nargs
3741 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3743 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3744 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3746 if (last_arg_fndecl
3747 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3749 tree call = *expr_p;
3751 --nargs;
3752 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3753 CALL_EXPR_FN (call),
3754 nargs, CALL_EXPR_ARGP (call));
3756 /* Copy all CALL_EXPR flags, location and block, except
3757 CALL_EXPR_VA_ARG_PACK flag. */
3758 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3759 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3760 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3761 = CALL_EXPR_RETURN_SLOT_OPT (call);
3762 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3763 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3765 /* Set CALL_EXPR_VA_ARG_PACK. */
3766 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3770 /* If the call returns twice, then after building the CFG the call
3771 argument computations will no longer dominate the call because
3772 we add an abnormal incoming edge to the call. So do not use SSA
3773 vars there. */
3774 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3776 /* Gimplify the function arguments. */
3777 if (nargs > 0)
3779 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3780 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3781 PUSH_ARGS_REVERSED ? i-- : i++)
3783 enum gimplify_status t;
3785 /* Avoid gimplifying the second argument to va_start, which needs to
3786 be the plain PARM_DECL. */
3787 if ((i != 1) || !builtin_va_start_p)
3789 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3790 EXPR_LOCATION (*expr_p), ! returns_twice);
3792 if (t == GS_ERROR)
3793 ret = GS_ERROR;
3798 /* Gimplify the static chain. */
3799 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3801 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3802 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3803 else
3805 enum gimplify_status t;
3806 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3807 EXPR_LOCATION (*expr_p), ! returns_twice);
3808 if (t == GS_ERROR)
3809 ret = GS_ERROR;
3813 /* Verify the function result. */
3814 if (want_value && fndecl
3815 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3817 error_at (loc, "using result of function returning %<void%>");
3818 ret = GS_ERROR;
3821 /* Try this again in case gimplification exposed something. */
3822 if (ret != GS_ERROR)
3824 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3826 if (new_tree && new_tree != *expr_p)
3828 /* There was a transformation of this call which computes the
3829 same value, but in a more efficient way. Return and try
3830 again. */
3831 *expr_p = new_tree;
3832 return GS_OK;
3835 else
3837 *expr_p = error_mark_node;
3838 return GS_ERROR;
3841 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3842 call. This allows us to eliminate redundant or useless
3843 calls to "const" functions. */
3844 if (TREE_CODE (*expr_p) == CALL_EXPR)
3846 int flags = call_expr_flags (*expr_p);
3847 if (flags & (ECF_CONST | ECF_PURE)
3848 /* An infinite loop is considered a side effect. */
3849 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3850 TREE_SIDE_EFFECTS (*expr_p) = 0;
3853 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3854 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3855 form and delegate the creation of a GIMPLE_CALL to
3856 gimplify_modify_expr. This is always possible because when
3857 WANT_VALUE is true, the caller wants the result of this call into
3858 a temporary, which means that we will emit an INIT_EXPR in
3859 internal_get_tmp_var which will then be handled by
3860 gimplify_modify_expr. */
3861 if (!want_value)
3863 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3864 have to do is replicate it as a GIMPLE_CALL tuple. */
3865 gimple_stmt_iterator gsi;
3866 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3867 notice_special_calls (call);
3868 gimplify_seq_add_stmt (pre_p, call);
3869 gsi = gsi_last (*pre_p);
3870 maybe_fold_stmt (&gsi);
3871 *expr_p = NULL_TREE;
3873 else
3874 /* Remember the original function type. */
3875 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3876 CALL_EXPR_FN (*expr_p));
3878 return ret;
3881 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3882 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3884 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3885 condition is true or false, respectively. If null, we should generate
3886 our own label to skip over the evaluation of this specific expression.
3888 LOCUS is the source location of the COND_EXPR.
3890 This function is the tree equivalent of do_jump.
3892 shortcut_cond_r should only be called by shortcut_cond_expr. */
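/* Editor's illustration, not part of the GCC sources: for the
   predicate of

     if (a && b) c; else d;

   shortcut_cond_expr ends up emitting roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     c;
     goto end;
     no: d;
     end:

   with this function producing the two leading conditionals.  */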
3894 static tree
3895 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3896 location_t locus)
3898 tree local_label = NULL_TREE;
3899 tree t, expr = NULL;
3901 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3902 retain the shortcut semantics. Just insert the gotos here;
3903 shortcut_cond_expr will append the real blocks later. */
3904 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3906 location_t new_locus;
3908 /* Turn if (a && b) into
3910 if (a); else goto no;
3911 if (b) goto yes; else goto no;
3912 (no:) */
3914 if (false_label_p == NULL)
3915 false_label_p = &local_label;
3917 /* Keep the original source location on the first 'if'. */
3918 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3919 append_to_statement_list (t, &expr);
3921 /* Set the source location of the && on the second 'if'. */
3922 new_locus = rexpr_location (pred, locus);
3923 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3924 new_locus);
3925 append_to_statement_list (t, &expr);
3927 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3929 location_t new_locus;
3931 /* Turn if (a || b) into
3933 if (a) goto yes;
3934 if (b) goto yes; else goto no;
3935 (yes:) */
3937 if (true_label_p == NULL)
3938 true_label_p = &local_label;
3940 /* Keep the original source location on the first 'if'. */
3941 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3942 append_to_statement_list (t, &expr);
3944 /* Set the source location of the || on the second 'if'. */
3945 new_locus = rexpr_location (pred, locus);
3946 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3947 new_locus);
3948 append_to_statement_list (t, &expr);
3950 else if (TREE_CODE (pred) == COND_EXPR
3951 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3952 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3954 location_t new_locus;
3956 /* As long as we're messing with gotos, turn if (a ? b : c) into
3957 if (a)
3958 if (b) goto yes; else goto no;
3959 else
3960 if (c) goto yes; else goto no;
3962 Don't do this if one of the arms has void type, which can happen
3963 in C++ when the arm is throw. */
3965 /* Keep the original source location on the first 'if'. Set the source
3966 location of the ? on the second 'if'. */
3967 new_locus = rexpr_location (pred, locus);
3968 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3969 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3970 false_label_p, locus),
3971 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3972 false_label_p, new_locus));
3974 else
3976 expr = build3 (COND_EXPR, void_type_node, pred,
3977 build_and_jump (true_label_p),
3978 build_and_jump (false_label_p));
3979 SET_EXPR_LOCATION (expr, locus);
3982 if (local_label)
3984 t = build1 (LABEL_EXPR, void_type_node, local_label);
3985 append_to_statement_list (t, &expr);
3988 return expr;
3991 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3992 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3993 statement, if it is the last one. Otherwise, return NULL. */
3995 static tree
3996 find_goto (tree expr)
3998 if (!expr)
3999 return NULL_TREE;
4001 if (TREE_CODE (expr) == GOTO_EXPR)
4002 return expr;
4004 if (TREE_CODE (expr) != STATEMENT_LIST)
4005 return NULL_TREE;
4007 tree_stmt_iterator i = tsi_start (expr);
4009 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4010 tsi_next (&i);
4012 if (!tsi_one_before_end_p (i))
4013 return NULL_TREE;
4015 return find_goto (tsi_stmt (i));
4018 /* Same as find_goto, except that it returns NULL if the destination
4019 is not a LABEL_DECL. */
4021 static inline tree
4022 find_goto_label (tree expr)
4024 tree dest = find_goto (expr);
4025 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4026 return dest;
4027 return NULL_TREE;
4030 /* Given a conditional expression EXPR with short-circuit boolean
4031 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4032 predicate apart into the equivalent sequence of conditionals. */
4034 static tree
4035 shortcut_cond_expr (tree expr)
4037 tree pred = TREE_OPERAND (expr, 0);
4038 tree then_ = TREE_OPERAND (expr, 1);
4039 tree else_ = TREE_OPERAND (expr, 2);
4040 tree true_label, false_label, end_label, t;
4041 tree *true_label_p;
4042 tree *false_label_p;
4043 bool emit_end, emit_false, jump_over_else;
4044 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4045 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4047 /* First do simple transformations. */
4048 if (!else_se)
4050 /* If there is no 'else', turn
4051 if (a && b) then c
4052 into
4053 if (a) if (b) then c. */
4054 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4056 /* Keep the original source location on the first 'if'. */
4057 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4058 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4059 /* Set the source location of the && on the second 'if'. */
4060 if (rexpr_has_location (pred))
4061 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4062 then_ = shortcut_cond_expr (expr);
4063 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4064 pred = TREE_OPERAND (pred, 0);
4065 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4066 SET_EXPR_LOCATION (expr, locus);
4070 if (!then_se)
4072 /* If there is no 'then', turn
4073 if (a || b); else d
4074 into
4075 if (a); else if (b); else d. */
4076 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4078 /* Keep the original source location on the first 'if'. */
4079 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4080 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4081 /* Set the source location of the || on the second 'if'. */
4082 if (rexpr_has_location (pred))
4083 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4084 else_ = shortcut_cond_expr (expr);
4085 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4086 pred = TREE_OPERAND (pred, 0);
4087 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4088 SET_EXPR_LOCATION (expr, locus);
4092 /* If we're done, great. */
4093 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4094 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4095 return expr;
4097 /* Otherwise we need to mess with gotos. Change
4098 if (a) c; else d;
4099 to
4100 if (a); else goto no;
4101 c; goto end;
4102 no: d; end:
4103 and recursively gimplify the condition. */
4105 true_label = false_label = end_label = NULL_TREE;
4107 /* If our arms just jump somewhere, hijack those labels so we don't
4108 generate jumps to jumps. */
4110 if (tree then_goto = find_goto_label (then_))
4112 true_label = GOTO_DESTINATION (then_goto);
4113 then_ = NULL;
4114 then_se = false;
4117 if (tree else_goto = find_goto_label (else_))
4119 false_label = GOTO_DESTINATION (else_goto);
4120 else_ = NULL;
4121 else_se = false;
4124 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4125 if (true_label)
4126 true_label_p = &true_label;
4127 else
4128 true_label_p = NULL;
4130 /* The 'else' branch also needs a label if it contains interesting code. */
4131 if (false_label || else_se)
4132 false_label_p = &false_label;
4133 else
4134 false_label_p = NULL;
4136 /* If there was nothing else in our arms, just forward the label(s). */
4137 if (!then_se && !else_se)
4138 return shortcut_cond_r (pred, true_label_p, false_label_p,
4139 EXPR_LOC_OR_LOC (expr, input_location));
4141 /* If our last subexpression already has a terminal label, reuse it. */
4142 if (else_se)
4143 t = expr_last (else_);
4144 else if (then_se)
4145 t = expr_last (then_);
4146 else
4147 t = NULL;
4148 if (t && TREE_CODE (t) == LABEL_EXPR)
4149 end_label = LABEL_EXPR_LABEL (t);
4151 /* If we don't care about jumping to the 'else' branch, jump to the end
4152 if the condition is false. */
4153 if (!false_label_p)
4154 false_label_p = &end_label;
4156 /* We only want to emit these labels if we aren't hijacking them. */
4157 emit_end = (end_label == NULL_TREE);
4158 emit_false = (false_label == NULL_TREE);
4160 /* We only emit the jump over the else clause if we have to--if the
4161 then clause may fall through. Otherwise we can wind up with a
4162 useless jump and a useless label at the end of gimplified code,
4163 which will cause us to think that this conditional as a whole
4164 falls through even if it doesn't. If we then inline a function
4165 which ends with such a condition, that can cause us to issue an
4166 inappropriate warning about control reaching the end of a
4167 non-void function. */
4168 jump_over_else = block_may_fallthru (then_);
4170 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4171 EXPR_LOC_OR_LOC (expr, input_location));
4173 expr = NULL;
4174 append_to_statement_list (pred, &expr);
4176 append_to_statement_list (then_, &expr);
4177 if (else_se)
4179 if (jump_over_else)
4181 tree last = expr_last (expr);
4182 t = build_and_jump (&end_label);
4183 if (rexpr_has_location (last))
4184 SET_EXPR_LOCATION (t, rexpr_location (last));
4185 append_to_statement_list (t, &expr);
4187 if (emit_false)
4189 t = build1 (LABEL_EXPR, void_type_node, false_label);
4190 append_to_statement_list (t, &expr);
4192 append_to_statement_list (else_, &expr);
4194 if (emit_end && end_label)
4196 t = build1 (LABEL_EXPR, void_type_node, end_label);
4197 append_to_statement_list (t, &expr);
4200 return expr;
4203 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
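/* Editor's illustration, not part of the GCC sources: in C a
   comparison such as 'a < b' has type int in GENERIC; this function
   retypes it to BOOLEAN_TYPE. It also recurses into the operands of
   TRUTH_*_EXPR nodes and, for a test like

     __builtin_expect ((long) (x > 0), 1) != 0

   into the truth-valued first argument of __builtin_expect.  */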
4205 tree
4206 gimple_boolify (tree expr)
4208 tree type = TREE_TYPE (expr);
4209 location_t loc = EXPR_LOCATION (expr);
4211 if (TREE_CODE (expr) == NE_EXPR
4212 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4213 && integer_zerop (TREE_OPERAND (expr, 1)))
4215 tree call = TREE_OPERAND (expr, 0);
4216 tree fn = get_callee_fndecl (call);
4218 /* For __builtin_expect ((long) (x), y) recurse into x as well
4219 if x is truth_value_p. */
4220 if (fn
4221 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4222 && call_expr_nargs (call) == 2)
4224 tree arg = CALL_EXPR_ARG (call, 0);
4225 if (arg)
4227 if (TREE_CODE (arg) == NOP_EXPR
4228 && TREE_TYPE (arg) == TREE_TYPE (call))
4229 arg = TREE_OPERAND (arg, 0);
4230 if (truth_value_p (TREE_CODE (arg)))
4232 arg = gimple_boolify (arg);
4233 CALL_EXPR_ARG (call, 0)
4234 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4240 switch (TREE_CODE (expr))
4242 case TRUTH_AND_EXPR:
4243 case TRUTH_OR_EXPR:
4244 case TRUTH_XOR_EXPR:
4245 case TRUTH_ANDIF_EXPR:
4246 case TRUTH_ORIF_EXPR:
4247 /* Also boolify the arguments of truth exprs. */
4248 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4249 /* FALLTHRU */
4251 case TRUTH_NOT_EXPR:
4252 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4254 /* These expressions always produce boolean results. */
4255 if (TREE_CODE (type) != BOOLEAN_TYPE)
4256 TREE_TYPE (expr) = boolean_type_node;
4257 return expr;
4259 case ANNOTATE_EXPR:
4260 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4262 case annot_expr_ivdep_kind:
4263 case annot_expr_unroll_kind:
4264 case annot_expr_no_vector_kind:
4265 case annot_expr_vector_kind:
4266 case annot_expr_parallel_kind:
4267 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4268 if (TREE_CODE (type) != BOOLEAN_TYPE)
4269 TREE_TYPE (expr) = boolean_type_node;
4270 return expr;
4271 default:
4272 gcc_unreachable ();
4275 default:
4276 if (COMPARISON_CLASS_P (expr))
4278 /* These expressions always produce boolean results. */
4279 if (TREE_CODE (type) != BOOLEAN_TYPE)
4280 TREE_TYPE (expr) = boolean_type_node;
4281 return expr;
4283 /* Other expressions that get here must have boolean values, but
4284 might need to be converted to the appropriate mode. */
4285 if (TREE_CODE (type) == BOOLEAN_TYPE)
4286 return expr;
4287 return fold_convert_loc (loc, boolean_type_node, expr);
4291 /* Given a conditional expression *EXPR_P without side effects, gimplify
4292 its operands. New statements are inserted in PRE_P. */
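/* Editor's illustration, not part of the GCC sources: for
   'x = p && q ? a : b' with side-effect-free, non-trapping arms, the
   condition is switched from TRUTH_ANDIF_EXPR to TRUTH_AND_EXPR (both
   operands evaluated, no branches) and gimplified roughly as

     t = p & q;
     x = t ? a : b;

   leaving a branch-free COND_EXPR on the rhs; 't' is a hypothetical
   temporary name.  */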
4294 static enum gimplify_status
4295 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4297 tree expr = *expr_p, cond;
4298 enum gimplify_status ret, tret;
4299 enum tree_code code;
4301 cond = gimple_boolify (COND_EXPR_COND (expr));
4303 /* We need to handle && and || specially, as their gimplification
4304 creates new pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
4305 code = TREE_CODE (cond);
4306 if (code == TRUTH_ANDIF_EXPR)
4307 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4308 else if (code == TRUTH_ORIF_EXPR)
4309 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4310 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4311 COND_EXPR_COND (*expr_p) = cond;
4313 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4314 is_gimple_val, fb_rvalue);
4315 ret = MIN (ret, tret);
4316 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4317 is_gimple_val, fb_rvalue);
4319 return MIN (ret, tret);
4322 /* Return true if evaluating EXPR could trap.
4323 EXPR is GENERIC, while tree_could_trap_p can be called
4324 only on GIMPLE. */
4326 bool
4327 generic_expr_could_trap_p (tree expr)
4329 unsigned i, n;
4331 if (!expr || is_gimple_val (expr))
4332 return false;
4334 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4335 return true;
4337 n = TREE_OPERAND_LENGTH (expr);
4338 for (i = 0; i < n; i++)
4339 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4340 return true;
4342 return false;
4345 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4346 into
4348 if (p)                  if (p)
4349   t1 = a;                 a;
4350 else             or     else
4351   t1 = b;                 b;
4354 The second form is used when *EXPR_P is of type void.
4356 PRE_P points to the list where side effects that must happen before
4357 *EXPR_P should be stored. */
4359 static enum gimplify_status
4360 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4362 tree expr = *expr_p;
4363 tree type = TREE_TYPE (expr);
4364 location_t loc = EXPR_LOCATION (expr);
4365 tree tmp, arm1, arm2;
4366 enum gimplify_status ret;
4367 tree label_true, label_false, label_cont;
4368 bool have_then_clause_p, have_else_clause_p;
4369 gcond *cond_stmt;
4370 enum tree_code pred_code;
4371 gimple_seq seq = NULL;
4373 /* If this COND_EXPR has a value, copy the values into a temporary within
4374 the arms. */
4375 if (!VOID_TYPE_P (type))
4377 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4378 tree result;
4380 /* If either an rvalue is ok or we do not require an lvalue, create the
4381 temporary. But we cannot do that if the type is addressable. */
4382 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4383 && !TREE_ADDRESSABLE (type))
4385 if (gimplify_ctxp->allow_rhs_cond_expr
4386 /* If either branch has side effects or could trap, it can't be
4387 evaluated unconditionally. */
4388 && !TREE_SIDE_EFFECTS (then_)
4389 && !generic_expr_could_trap_p (then_)
4390 && !TREE_SIDE_EFFECTS (else_)
4391 && !generic_expr_could_trap_p (else_))
4392 return gimplify_pure_cond_expr (expr_p, pre_p);
4394 tmp = create_tmp_var (type, "iftmp");
4395 result = tmp;
4398 /* Otherwise, only create and copy references to the values. */
4399 else
4401 type = build_pointer_type (type);
4403 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4404 then_ = build_fold_addr_expr_loc (loc, then_);
4406 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4407 else_ = build_fold_addr_expr_loc (loc, else_);
4409 expr
4410 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4412 tmp = create_tmp_var (type, "iftmp");
4413 result = build_simple_mem_ref_loc (loc, tmp);
4416 /* Build the new then clause, `tmp = then_;'. But don't build the
4417 assignment if the value is void; in C++ it can be void if the arm is a throw. */
4418 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4419 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4421 /* Similarly, build the new else clause, `tmp = else_;'. */
4422 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4423 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4425 TREE_TYPE (expr) = void_type_node;
4426 recalculate_side_effects (expr);
4428 /* Move the COND_EXPR to the prequeue. */
4429 gimplify_stmt (&expr, pre_p);
4431 *expr_p = result;
4432 return GS_ALL_DONE;
4435 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4436 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4437 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4438 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4440 /* Make sure the condition has BOOLEAN_TYPE. */
4441 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4443 /* Break apart && and || conditions. */
4444 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4445 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4447 expr = shortcut_cond_expr (expr);
4449 if (expr != *expr_p)
4451 *expr_p = expr;
4453 /* We can't rely on gimplify_expr to re-gimplify the expanded
4454 form properly, as cleanups might cause the target labels to be
4455 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4456 set up a conditional context. */
4457 gimple_push_condition ();
4458 gimplify_stmt (expr_p, &seq);
4459 gimple_pop_condition (pre_p);
4460 gimple_seq_add_seq (pre_p, seq);
4462 return GS_ALL_DONE;
4466 /* Now do the normal gimplification. */
4468 /* Gimplify condition. */
4469 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4470 is_gimple_condexpr_for_cond, fb_rvalue);
4471 if (ret == GS_ERROR)
4472 return GS_ERROR;
4473 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4475 gimple_push_condition ();
4477 have_then_clause_p = have_else_clause_p = false;
4478 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4479 if (label_true
4480 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4481 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4482 have different locations, otherwise we end up with incorrect
4483 location information on the branches. */
4484 && (optimize
4485 || !EXPR_HAS_LOCATION (expr)
4486 || !rexpr_has_location (label_true)
4487 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4489 have_then_clause_p = true;
4490 label_true = GOTO_DESTINATION (label_true);
4492 else
4493 label_true = create_artificial_label (UNKNOWN_LOCATION);
4494 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4495 if (label_false
4496 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4497 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4498 have different locations, otherwise we end up with incorrect
4499 location information on the branches. */
4500 && (optimize
4501 || !EXPR_HAS_LOCATION (expr)
4502 || !rexpr_has_location (label_false)
4503 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4505 have_else_clause_p = true;
4506 label_false = GOTO_DESTINATION (label_false);
4508 else
4509 label_false = create_artificial_label (UNKNOWN_LOCATION);
4511 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4512 &arm2);
4513 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4514 label_false);
4515 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4516 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4517 gimplify_seq_add_stmt (&seq, cond_stmt);
4518 gimple_stmt_iterator gsi = gsi_last (seq);
4519 maybe_fold_stmt (&gsi);
4521 label_cont = NULL_TREE;
4522 if (!have_then_clause_p)
4524 /* For if (...) {} else { code; } put label_true after
4525 the else block. */
4526 if (TREE_OPERAND (expr, 1) == NULL_TREE
4527 && !have_else_clause_p
4528 && TREE_OPERAND (expr, 2) != NULL_TREE)
4530 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4531 handling that label_cont == label_true can only be reached
4532 through fallthrough from { code; }. */
4533 if (integer_zerop (COND_EXPR_COND (expr)))
4534 UNUSED_LABEL_P (label_true) = 1;
4535 label_cont = label_true;
4537 else
4539 bool then_side_effects
4540 = (TREE_OPERAND (expr, 1)
4541 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4542 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4543 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4544 /* For if (...) { code; } else {} or
4545 if (...) { code; } else goto label; or
4546 if (...) { code; return; } else { ... }
4547 label_cont isn't needed. */
4548 if (!have_else_clause_p
4549 && TREE_OPERAND (expr, 2) != NULL_TREE
4550 && gimple_seq_may_fallthru (seq))
4552 gimple *g;
4553 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4555 /* For if (0) { non-side-effect-code } else { code }
4556 tell -Wimplicit-fallthrough handling that label_cont can
4557 only be reached through fallthrough from { code }. */
4558 if (integer_zerop (COND_EXPR_COND (expr)))
4560 UNUSED_LABEL_P (label_true) = 1;
4561 if (!then_side_effects)
4562 UNUSED_LABEL_P (label_cont) = 1;
4565 g = gimple_build_goto (label_cont);
4567 /* GIMPLE_COND's are very low level; they have embedded
4568 gotos. This particular embedded goto should not be marked
4569 with the location of the original COND_EXPR, as it would
4570 correspond to the COND_EXPR's condition, not the ELSE or the
4571 THEN arms. To avoid marking it with the wrong location, flag
4572 it as "no location". */
4573 gimple_set_do_not_emit_location (g);
4575 gimplify_seq_add_stmt (&seq, g);
4579 if (!have_else_clause_p)
4581 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4582 tell -Wimplicit-fallthrough handling that label_false can only be
4583 reached through fallthrough from { code }. */
4584 if (integer_nonzerop (COND_EXPR_COND (expr))
4585 && (TREE_OPERAND (expr, 2) == NULL_TREE
4586 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4587 UNUSED_LABEL_P (label_false) = 1;
4588 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4589 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4591 if (label_cont)
4592 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4594 gimple_pop_condition (pre_p);
4595 gimple_seq_add_seq (pre_p, seq);
4597 if (ret == GS_ERROR)
4598 ; /* Do nothing. */
4599 else if (have_then_clause_p || have_else_clause_p)
4600 ret = GS_ALL_DONE;
4601 else
4603 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4604 expr = TREE_OPERAND (expr, 0);
4605 gimplify_stmt (&expr, pre_p);
4608 *expr_p = NULL;
4609 return ret;
4612 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4613 to be marked addressable.
4615 We cannot rely on such an expression being directly markable if a temporary
4616 has been created by the gimplification. In this case, we create another
4617 temporary and initialize it with a copy, which will become a store after we
4618 mark it addressable. This can happen if the front-end passed us something
4619 that it could not mark addressable yet, like the conversion (int) floatvar
4620 passed to a Fortran pass-by-reference parameter. */
4622 static void
4623 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4625 while (handled_component_p (*expr_p))
4626 expr_p = &TREE_OPERAND (*expr_p, 0);
4628 /* Do not allow an SSA name as the temporary. */
4629 if (is_gimple_reg (*expr_p))
4630 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4633 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4634 a call to __builtin_memcpy. */
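/* Editor's illustration, not part of the GCC sources: for a large
   aggregate copy such as

     struct big { char buf[1024]; } *p, *q;
     *p = *q;

   this emits the equivalent of

     __builtin_memcpy (p, q, 1024);

   after making both sides addressable.  */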
4636 static enum gimplify_status
4637 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4638 gimple_seq *seq_p)
4640 tree t, to, to_ptr, from, from_ptr;
4641 gcall *gs;
4642 location_t loc = EXPR_LOCATION (*expr_p);
4644 to = TREE_OPERAND (*expr_p, 0);
4645 from = TREE_OPERAND (*expr_p, 1);
4647 /* Mark the RHS addressable. Beware that it may not be possible to do so
4648 directly if a temporary has been created by the gimplification. */
4649 prepare_gimple_addressable (&from, seq_p);
4651 mark_addressable (from);
4652 from_ptr = build_fold_addr_expr_loc (loc, from);
4653 gimplify_arg (&from_ptr, seq_p, loc);
4655 mark_addressable (to);
4656 to_ptr = build_fold_addr_expr_loc (loc, to);
4657 gimplify_arg (&to_ptr, seq_p, loc);
4659 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4661 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4662 gimple_call_set_alloca_for_var (gs, true);
4664 if (want_value)
4666 /* tmp = memcpy() */
4667 t = create_tmp_var (TREE_TYPE (to_ptr));
4668 gimple_call_set_lhs (gs, t);
4669 gimplify_seq_add_stmt (seq_p, gs);
4671 *expr_p = build_simple_mem_ref (t);
4672 return GS_ALL_DONE;
4675 gimplify_seq_add_stmt (seq_p, gs);
4676 *expr_p = NULL;
4677 return GS_ALL_DONE;
4680 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4681 a call to __builtin_memset. In this case we know that the RHS is
4682 a CONSTRUCTOR with an empty element list. */
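/* Editor's illustration, not part of the GCC sources: an assignment
   from an empty CONSTRUCTOR, e.g.

     struct big { char buf[1024]; } b;
     b = (struct big) {};

   is emitted as the equivalent of

     __builtin_memset (&b, 0, 1024);  */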
4684 static enum gimplify_status
4685 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4686 gimple_seq *seq_p)
4688 tree t, from, to, to_ptr;
4689 gcall *gs;
4690 location_t loc = EXPR_LOCATION (*expr_p);
4692 /* Assert our assumptions, to abort instead of producing wrong code
4693 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4694 not be immediately exposed. */
4695 from = TREE_OPERAND (*expr_p, 1);
4696 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4697 from = TREE_OPERAND (from, 0);
4699 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4700 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4702 /* Now proceed. */
4703 to = TREE_OPERAND (*expr_p, 0);
4705 to_ptr = build_fold_addr_expr_loc (loc, to);
4706 gimplify_arg (&to_ptr, seq_p, loc);
4707 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4709 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4711 if (want_value)
4713 /* tmp = memset() */
4714 t = create_tmp_var (TREE_TYPE (to_ptr));
4715 gimple_call_set_lhs (gs, t);
4716 gimplify_seq_add_stmt (seq_p, gs);
4718 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4719 return GS_ALL_DONE;
4722 gimplify_seq_add_stmt (seq_p, gs);
4723 *expr_p = NULL;
4724 return GS_ALL_DONE;
4727 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4728 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4729 assignment. Return non-null if we detect a potential overlap. */
4731 struct gimplify_init_ctor_preeval_data
4733 /* The base decl of the lhs object. May be NULL, in which case we
4734 have to assume the lhs is indirect. */
4735 tree lhs_base_decl;
4737 /* The alias set of the lhs object. */
4738 alias_set_type lhs_alias_set;
4741 static tree
4742 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4744 struct gimplify_init_ctor_preeval_data *data
4745 = (struct gimplify_init_ctor_preeval_data *) xdata;
4746 tree t = *tp;
4748 /* If we find the base object, obviously we have overlap. */
4749 if (data->lhs_base_decl == t)
4750 return t;
4752 /* If the constructor component is indirect, determine if we have a
4753 potential overlap with the lhs. The only bits of information we
4754 have to go on at this point are addressability and alias sets. */
4755 if ((INDIRECT_REF_P (t)
4756 || TREE_CODE (t) == MEM_REF)
4757 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4758 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4759 return t;
4761 /* If the constructor component is a call, determine if it can hide a
4762 potential overlap with the lhs through an INDIRECT_REF like above.
4763 ??? Ugh - this is completely broken. In fact this whole analysis
4764 doesn't look conservative. */
4765 if (TREE_CODE (t) == CALL_EXPR)
4767 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4769 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4770 if (POINTER_TYPE_P (TREE_VALUE (type))
4771 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4772 && alias_sets_conflict_p (data->lhs_alias_set,
4773 get_alias_set
4774 (TREE_TYPE (TREE_VALUE (type)))))
4775 return t;
4778 if (IS_TYPE_OR_DECL_P (t))
4779 *walk_subtrees = 0;
4780 return NULL;
4783 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4784 force values that overlap with the lhs (as described by *DATA)
4785 into temporaries. */
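/* Editor's illustration, not part of the GCC sources: in

     struct pair { int x, y; } a;
     a = (struct pair) { .x = a.y, .y = 1 };

   the value a.y overlaps the lhs, so it is pre-evaluated into a
   temporary, roughly

     t = a.y;  a.x = t;  a.y = 1;

   before any element of 'a' is overwritten; 't' is a hypothetical
   temporary name.  */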
4787 static void
4788 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4789 struct gimplify_init_ctor_preeval_data *data)
4791 enum gimplify_status one;
4793 /* If the value is constant, then there's nothing to pre-evaluate. */
4794 if (TREE_CONSTANT (*expr_p))
4796 /* Ensure it does not have side effects, it might contain a reference to
4797 the object we're initializing. */
4798 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4799 return;
4802 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4803 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4804 return;
4806 /* Recurse for nested constructors. */
4807 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4809 unsigned HOST_WIDE_INT ix;
4810 constructor_elt *ce;
4811 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4813 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4814 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4816 return;
4819 /* If this is a variable sized type, we must remember the size. */
4820 maybe_with_size_expr (expr_p);
4822 /* Gimplify the constructor element to something appropriate for the rhs
4823 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4824 the gimplifier will consider this a store to memory. Doing this
4825 gimplification now means that we won't have to deal with complicated
4826 language-specific trees, nor trees like SAVE_EXPR that can induce
4827 exponential search behavior. */
4828 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4829 if (one == GS_ERROR)
4831 *expr_p = NULL;
4832 return;
4835 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4836 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4837 always be true for all scalars, since is_gimple_mem_rhs insists on a
4838 temporary variable for them. */
4839 if (DECL_P (*expr_p))
4840 return;
4842 /* If this is of variable size, we have no choice but to assume it doesn't
4843 overlap since we can't make a temporary for it. */
4844 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4845 return;
4847 /* Otherwise, we must search for overlap ... */
4848 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4849 return;
4851 /* ... and if found, force the value into a temporary. */
4852 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4855 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4856 a RANGE_EXPR in a CONSTRUCTOR for an array.
4858 var = lower;
4859 loop_entry:
4860 object[var] = value;
4861 if (var == upper)
4862 goto loop_exit;
4863 var = var + 1;
4864 goto loop_entry;
4865 loop_exit:
4867 We increment var _after_ the loop exit check because we might otherwise
4868 fail if upper == TYPE_MAX_VALUE of the type of upper.
4870 Note that we never have to deal with SAVE_EXPRs here, because this has
4871 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4873 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4874 gimple_seq *, bool);
4876 static void
4877 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4878 tree value, tree array_elt_type,
4879 gimple_seq *pre_p, bool cleared)
4881 tree loop_entry_label, loop_exit_label, fall_thru_label;
4882 tree var, var_type, cref, tmp;
4884 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4885 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4886 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4888 /* Create and initialize the index variable. */
4889 var_type = TREE_TYPE (upper);
4890 var = create_tmp_var (var_type);
4891 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4893 /* Add the loop entry label. */
4894 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4896 /* Build the reference. */
4897 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4898 var, NULL_TREE, NULL_TREE);
4900 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4901 the store. Otherwise just assign value to the reference. */
4903 if (TREE_CODE (value) == CONSTRUCTOR)
4904 /* NB we might have to call ourselves recursively through
4905 gimplify_init_ctor_eval if the value is a constructor. */
4906 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4907 pre_p, cleared);
4908 else
4910 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4911 != GS_ERROR)
4912 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4915 /* We exit the loop when the index var is equal to the upper bound. */
4916 gimplify_seq_add_stmt (pre_p,
4917 gimple_build_cond (EQ_EXPR, var, upper,
4918 loop_exit_label, fall_thru_label));
4920 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4922 /* Otherwise, increment the index var... */
4923 tmp = build2 (PLUS_EXPR, var_type, var,
4924 fold_convert (var_type, integer_one_node));
4925 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4927 /* ...and jump back to the loop entry. */
4928 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4930 /* Add the loop exit label. */
4931 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4934 /* A subroutine of gimplify_init_constructor. Generate individual
4935 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4936 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4937 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4938 zeroed first. */
4940 static void
4941 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4942 gimple_seq *pre_p, bool cleared)
4944 tree array_elt_type = NULL;
4945 unsigned HOST_WIDE_INT ix;
4946 tree purpose, value;
4948 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4949 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4951 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4953 tree cref;
4955 /* NULL values are created above for gimplification errors. */
4956 if (value == NULL)
4957 continue;
4959 if (cleared && initializer_zerop (value))
4960 continue;
4962 /* ??? Here's to hoping the front end fills in all of the indices,
4963 so we don't have to figure out what's missing ourselves. */
4964 gcc_assert (purpose);
4966 /* Skip zero-sized fields, unless value has side-effects. This can
4967 happen with calls to functions returning an empty type, which
4968 we shouldn't discard. As a number of downstream passes don't
4969 expect sets of empty type fields, we rely on the gimplification of
4970 the MODIFY_EXPR we make below to drop the assignment statement. */
4971 if (!TREE_SIDE_EFFECTS (value)
4972 && TREE_CODE (purpose) == FIELD_DECL
4973 && is_empty_type (TREE_TYPE (purpose)))
4974 continue;
4976 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4977 whole range. */
4978 if (TREE_CODE (purpose) == RANGE_EXPR)
4980 tree lower = TREE_OPERAND (purpose, 0);
4981 tree upper = TREE_OPERAND (purpose, 1);
4983 /* If the lower bound is equal to upper, just treat it as if
4984 upper was the index. */
4985 if (simple_cst_equal (lower, upper))
4986 purpose = upper;
4987 else
4989 gimplify_init_ctor_eval_range (object, lower, upper, value,
4990 array_elt_type, pre_p, cleared);
4991 continue;
4995 if (array_elt_type)
4997 /* Do not use bitsizetype for ARRAY_REF indices. */
4998 if (TYPE_DOMAIN (TREE_TYPE (object)))
4999 purpose
5000 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5001 purpose);
5002 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5003 purpose, NULL_TREE, NULL_TREE);
5005 else
5007 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5008 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5009 unshare_expr (object), purpose, NULL_TREE);
5012 if (TREE_CODE (value) == CONSTRUCTOR
5013 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5014 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5015 pre_p, cleared);
5016 else
5018 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5019 gimplify_and_add (init, pre_p);
5020 ggc_free (init);
5025 /* Return the appropriate RHS predicate for this LHS. */
5027 gimple_predicate
5028 rhs_predicate_for (tree lhs)
5030 if (is_gimple_reg (lhs))
5031 return is_gimple_reg_rhs_or_call;
5032 else
5033 return is_gimple_mem_rhs_or_call;
5036 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5037 before the LHS has been gimplified. */
5039 static gimple_predicate
5040 initial_rhs_predicate_for (tree lhs)
5042 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5043 return is_gimple_reg_rhs_or_call;
5044 else
5045 return is_gimple_mem_rhs_or_call;
5048 /* Gimplify a C99 compound literal expression. This just means adding
5049 the DECL_EXPR before the current statement and using its anonymous
5050 decl instead. */
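/* Editor's illustration, not part of the GCC sources:

     int *p = (int []) { 1, 2, 3 };

   gimplifies by emitting the DECL_EXPR for the literal's anonymous
   decl first, roughly

     int D[3] = { 1, 2, 3 };
     p = &D[0];

   where 'D' stands in for the compiler-generated anonymous
   variable.  */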
5052 static enum gimplify_status
5053 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5054 bool (*gimple_test_f) (tree),
5055 fallback_t fallback)
5057 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5058 tree decl = DECL_EXPR_DECL (decl_s);
5059 tree init = DECL_INITIAL (decl);
5060 /* Mark the decl as addressable if the compound literal
5061 expression is addressable now; otherwise it would be marked too
5062 late, after we gimplify the initialization expression. */
5063 if (TREE_ADDRESSABLE (*expr_p))
5064 TREE_ADDRESSABLE (decl) = 1;
5065 /* Otherwise, if we don't need an lvalue and have a literal, directly
5066 substitute it. Check that it matches the gimple predicate, as
5067 otherwise we'd generate a new temporary, and we might as well just
5068 use the decl we already have. */
5069 else if (!TREE_ADDRESSABLE (decl)
5070 && !TREE_THIS_VOLATILE (decl)
5071 && init
5072 && (fallback & fb_lvalue) == 0
5073 && gimple_test_f (init))
5075 *expr_p = init;
5076 return GS_OK;
5079 /* If the decl is not addressable, then it is being used in some
5080 expression or on the right hand side of a statement, and it can
5081 be put into a readonly data section. */
5082 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5083 TREE_READONLY (decl) = 1;
5085 /* This decl isn't mentioned in the enclosing block, so add it to the
5086 list of temps. FIXME it seems a bit of a kludge to say that
5087 anonymous artificial vars aren't pushed, but everything else is. */
5088 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5089 gimple_add_tmp_var (decl);
5091 gimplify_and_add (decl_s, pre_p);
5092 *expr_p = decl;
5093 return GS_OK;
5096 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5097 return a new CONSTRUCTOR if something changed. */
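/* Editor's illustration, not part of the GCC sources: an initializer
   such as

     struct outer o = { .in = (struct inner) { 1, 2 } };

   can have the embedded literal replaced by its CONSTRUCTOR, as if

     struct outer o = { .in = { 1, 2 } };

   had been written, provided neither the literal nor its decl is
   addressable.  */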
5099 static tree
5100 optimize_compound_literals_in_ctor (tree orig_ctor)
5102 tree ctor = orig_ctor;
5103 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5104 unsigned int idx, num = vec_safe_length (elts);
5106 for (idx = 0; idx < num; idx++)
5108 tree value = (*elts)[idx].value;
5109 tree newval = value;
5110 if (TREE_CODE (value) == CONSTRUCTOR)
5111 newval = optimize_compound_literals_in_ctor (value);
5112 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5114 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5115 tree decl = DECL_EXPR_DECL (decl_s);
5116 tree init = DECL_INITIAL (decl);
5118 if (!TREE_ADDRESSABLE (value)
5119 && !TREE_ADDRESSABLE (decl)
5120 && init
5121 && TREE_CODE (init) == CONSTRUCTOR)
5122 newval = optimize_compound_literals_in_ctor (init);
5124 if (newval == value)
5125 continue;
5127 if (ctor == orig_ctor)
5129 ctor = copy_node (orig_ctor);
5130 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5131 elts = CONSTRUCTOR_ELTS (ctor);
5133 (*elts)[idx].value = newval;
5135 return ctor;
5138 /* A subroutine of gimplify_modify_expr. Break out elements of a
5139 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5141 Note that we still need to clear any elements that don't have explicit
5142 initializers, so if not all elements are initialized we keep the
5143 original MODIFY_EXPR; we just remove all of the constructor elements.
5145 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5146 GS_ERROR if we would have to create a temporary when gimplifying
5147 this constructor. Otherwise, return GS_OK.
5149 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
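/* Editor's illustration, not part of the GCC sources: a sparse
   initializer such as

     int a[64] = { [3] = 7 };

   typically takes the "cleared" path below and is emitted roughly as

     memset (&a, 0, sizeof a);
     a[3] = 7;

   whereas a dense, fully-constant initializer may instead be dropped
   to static storage and block-copied.  */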
5151 static enum gimplify_status
5152 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5153 bool want_value, bool notify_temp_creation)
5155 tree object, ctor, type;
5156 enum gimplify_status ret;
5157 vec<constructor_elt, va_gc> *elts;
5158 bool cleared = false;
5159 bool is_empty_ctor = false;
5160 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5162 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5164 if (!notify_temp_creation)
5166 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5167 is_gimple_lvalue, fb_lvalue);
5168 if (ret == GS_ERROR)
5169 return ret;
5172 object = TREE_OPERAND (*expr_p, 0);
5173 ctor = TREE_OPERAND (*expr_p, 1)
5174 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5175 type = TREE_TYPE (ctor);
5176 elts = CONSTRUCTOR_ELTS (ctor);
5177 ret = GS_ALL_DONE;
5179 switch (TREE_CODE (type))
5181 case RECORD_TYPE:
5182 case UNION_TYPE:
5183 case QUAL_UNION_TYPE:
5184 case ARRAY_TYPE:
5186 /* Use readonly data for initializers of this or smaller size
5187 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5188 ratio. */
5189 const HOST_WIDE_INT min_unique_size = 64;
5190 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5191 is smaller than this, use readonly data. */
5192 const int unique_nonzero_ratio = 8;
5193 /* True if a single access of the object must be ensured. This is the
5194 case if the target is volatile, the type is non-addressable and more
5195 than one field needs to be assigned. */
5196 const bool ensure_single_access
5197 = TREE_THIS_VOLATILE (object)
5198 && !TREE_ADDRESSABLE (type)
5199 && vec_safe_length (elts) > 1;
5200 struct gimplify_init_ctor_preeval_data preeval_data;
5201 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5202 HOST_WIDE_INT num_unique_nonzero_elements;
5203 bool complete_p, valid_const_initializer;
5205 /* Aggregate types must lower constructors to initialization of
5206 individual elements. The exception is that a CONSTRUCTOR node
5207 with no elements indicates zero-initialization of the whole. */
5208 if (vec_safe_is_empty (elts))
5210 if (notify_temp_creation)
5211 return GS_OK;
5213 /* The var will be initialized and so appear on the lhs of an
5214 assignment; it can't be TREE_READONLY anymore. */
5215 if (VAR_P (object))
5216 TREE_READONLY (object) = 0;
5218 is_empty_ctor = true;
5219 break;
5222 /* Fetch information about the constructor to direct later processing.
5223 We might want to make static versions of it in various cases, and
5224 can only do so if it is known to be a valid constant initializer. */
5225 valid_const_initializer
5226 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5227 &num_unique_nonzero_elements,
5228 &num_ctor_elements, &complete_p);
5230 /* If a const aggregate variable is being initialized, then it
5231 should never be a loss to promote the variable to be static. */
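/* Editor's illustration, not part of the GCC sources: for a local

     const int tbl[4] = { 1, 2, 3, 4 };

   the initializer is a valid constant, so 'tbl' is promoted below to a
   TREE_STATIC variable whose bits live in the read-only data section
   instead of being stored at run time.  */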
5232 if (valid_const_initializer
5233 && num_nonzero_elements > 1
5234 && TREE_READONLY (object)
5235 && VAR_P (object)
5236 && !DECL_REGISTER (object)
5237 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
5238 /* For ctors that have many repeated nonzero elements
5239 represented through RANGE_EXPRs, prefer initializing
5240 those through runtime loops over copies of large amounts
5241 of data from readonly data section. */
5242 && (num_unique_nonzero_elements
5243 > num_nonzero_elements / unique_nonzero_ratio
5244 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5245 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5247 if (notify_temp_creation)
5248 return GS_ERROR;
5250 DECL_INITIAL (object) = ctor;
5251 TREE_STATIC (object) = 1;
5252 if (!DECL_NAME (object))
5253 DECL_NAME (object) = create_tmp_var_name ("C");
5254 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5256 /* ??? C++ doesn't automatically append a .<number> to the
5257 assembler name, and even when it does, it looks at FE private
5258 data structures to figure out what that number should be,
5259 which are not set for this variable. I suppose this is
5260 important for local statics for inline functions, which aren't
5261 "local" in the object file sense. So in order to get a unique
5262 TU-local symbol, we must invoke the lhd version now. */
5263 lhd_set_decl_assembler_name (object);
5265 *expr_p = NULL_TREE;
5266 break;
5269 /* The var will be initialized and so appear on the lhs of an
5270 assignment; it can't be TREE_READONLY anymore. */
5271 if (VAR_P (object) && !notify_temp_creation)
5272 TREE_READONLY (object) = 0;
5274 /* If there are "lots" of initialized elements, even discounting
5275 those that are not address constants (and thus *must* be
5276 computed at runtime), then partition the constructor into
5277 constant and non-constant parts. Block copy the constant
5278 parts in, then generate code for the non-constant parts. */
5279 /* TODO. There's code in cp/typeck.cc to do this. */
5281 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5282 /* store_constructor will ignore the clearing of variable-sized
5283 objects. Initializers for such objects must explicitly set
5284 every field that needs to be set. */
5285 cleared = false;
5286 else if (!complete_p)
5287 /* If the constructor isn't complete, clear the whole object
5288 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5290 ??? This ought not to be needed. For any element not present
5291 in the initializer, we should simply set it to zero. Except
5292 we'd need to *find* the elements that are not present, and that
5293 requires trickery to avoid quadratic compile-time behavior in
5294 large cases or excessive memory use in small cases. */
5295 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5296 else if (num_ctor_elements - num_nonzero_elements
5297 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5298 && num_nonzero_elements < num_ctor_elements / 4)
5299 /* If there are "lots" of zeros, it's more efficient to clear
5300 the memory and then set the nonzero elements. */
5301 cleared = true;
5302 else if (ensure_single_access && num_nonzero_elements == 0)
5303 /* If a single access to the target must be ensured and all elements
5304 are zero, then it's optimal to clear whatever their number. */
5305 cleared = true;
5306 else
5307 cleared = false;
5309 /* If there are "lots" of initialized elements, and all of them
5310 are valid address constants, then the entire initializer can
5311 be dropped to memory, and then memcpy'd out. Don't do this
5312 for sparse arrays, though, as it's more efficient to follow
5313 the standard CONSTRUCTOR behavior of memset followed by
5314 individual element initialization. Also don't do this for small
5315 all-zero initializers (which aren't big enough to merit
5316 clearing), and don't try to make bitwise copies of
5317 TREE_ADDRESSABLE types. */
5318 if (valid_const_initializer
5319 && complete_p
5320 && !(cleared || num_nonzero_elements == 0)
5321 && !TREE_ADDRESSABLE (type))
5323 HOST_WIDE_INT size = int_size_in_bytes (type);
5324 unsigned int align;
5326 /* ??? We can still get unbounded array types, at least
5327 from the C++ front end. This seems wrong, but attempt
5328 to work around it for now. */
5329 if (size < 0)
5331 size = int_size_in_bytes (TREE_TYPE (object));
5332 if (size >= 0)
5333 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5336 /* Find the maximum alignment we can assume for the object. */
5337 /* ??? Make use of DECL_OFFSET_ALIGN. */
5338 if (DECL_P (object))
5339 align = DECL_ALIGN (object);
5340 else
5341 align = TYPE_ALIGN (type);
5343 /* Do a block move either if the size is so small as to make
5344 each individual move a sub-unit move on average, or if it
5345 is so large as to make individual moves inefficient. */
5346 if (size > 0
5347 && num_nonzero_elements > 1
5348 /* For ctors that have many repeated nonzero elements
5349 represented through RANGE_EXPRs, prefer initializing
5350 those through runtime loops over copies of large amounts
5351 of data from readonly data section. */
5352 && (num_unique_nonzero_elements
5353 > num_nonzero_elements / unique_nonzero_ratio
5354 || size <= min_unique_size)
5355 && (size < num_nonzero_elements
5356 || !can_move_by_pieces (size, align)))
5358 if (notify_temp_creation)
5359 return GS_ERROR;
5361 walk_tree (&ctor, force_labels_r, NULL, NULL);
5362 ctor = tree_output_constant_def (ctor);
5363 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5364 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5365 TREE_OPERAND (*expr_p, 1) = ctor;
5367 /* This is no longer an assignment of a CONSTRUCTOR, but
5368 we still may have processing to do on the LHS. So
5369 pretend we didn't do anything here to let that happen. */
5370 return GS_UNHANDLED;
5374 /* If a single access to the target must be ensured and there are
5375 nonzero elements or the zero elements are not assigned en masse,
5376 initialize the target from a temporary. */
5377 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5379 if (notify_temp_creation)
5380 return GS_ERROR;
5382 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5383 TREE_OPERAND (*expr_p, 0) = temp;
5384 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5385 *expr_p,
5386 build2 (MODIFY_EXPR, void_type_node,
5387 object, temp));
5388 return GS_OK;
5391 if (notify_temp_creation)
5392 return GS_OK;
5394 /* If there are nonzero elements and if needed, pre-evaluate to capture
5395 elements overlapping with the lhs into temporaries. We must do this
5396 before clearing to fetch the values before they are zeroed-out. */
5397 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5399 preeval_data.lhs_base_decl = get_base_address (object);
5400 if (!DECL_P (preeval_data.lhs_base_decl))
5401 preeval_data.lhs_base_decl = NULL;
5402 preeval_data.lhs_alias_set = get_alias_set (object);
5404 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5405 pre_p, post_p, &preeval_data);
5408 bool ctor_has_side_effects_p
5409 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5411 if (cleared)
5413 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5414 Note that we still have to gimplify, in order to handle the
5415 case of variable sized types. Avoid shared tree structures. */
5416 CONSTRUCTOR_ELTS (ctor) = NULL;
5417 TREE_SIDE_EFFECTS (ctor) = 0;
5418 object = unshare_expr (object);
5419 gimplify_stmt (expr_p, pre_p);
5422 /* If we have not block cleared the object, or if there are nonzero
5423 elements in the constructor, or if the constructor has side effects,
5424 add assignments to the individual scalar fields of the object. */
5425 if (!cleared
5426 || num_nonzero_elements > 0
5427 || ctor_has_side_effects_p)
5428 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5430 *expr_p = NULL_TREE;
5432 break;
5434 case COMPLEX_TYPE:
5436 tree r, i;
5438 if (notify_temp_creation)
5439 return GS_OK;
5441 /* Extract the real and imaginary parts out of the ctor. */
5442 gcc_assert (elts->length () == 2);
5443 r = (*elts)[0].value;
5444 i = (*elts)[1].value;
5445 if (r == NULL || i == NULL)
5447 tree zero = build_zero_cst (TREE_TYPE (type));
5448 if (r == NULL)
5449 r = zero;
5450 if (i == NULL)
5451 i = zero;
5454 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5455 represent creation of a complex value. */
5456 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5458 ctor = build_complex (type, r, i);
5459 TREE_OPERAND (*expr_p, 1) = ctor;
5461 else
5463 ctor = build2 (COMPLEX_EXPR, type, r, i);
5464 TREE_OPERAND (*expr_p, 1) = ctor;
5465 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5466 pre_p,
5467 post_p,
5468 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5469 fb_rvalue);
5472 break;
5474 case VECTOR_TYPE:
5476 unsigned HOST_WIDE_INT ix;
5477 constructor_elt *ce;
5479 if (notify_temp_creation)
5480 return GS_OK;
5482 /* Vector types use CONSTRUCTOR all the way through gimple
5483 compilation as a general initializer. */
5484 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5486 enum gimplify_status tret;
5487 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5488 fb_rvalue);
5489 if (tret == GS_ERROR)
5490 ret = GS_ERROR;
5491 else if (TREE_STATIC (ctor)
5492 && !initializer_constant_valid_p (ce->value,
5493 TREE_TYPE (ce->value)))
5494 TREE_STATIC (ctor) = 0;
5496 recompute_constructor_flags (ctor);
5498 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5499 if (TREE_CONSTANT (ctor))
5501 bool constant_p = true;
5502 tree value;
5504 /* Even when ctor is constant, it might contain non-*_CST
5505 elements, such as addresses or trapping values like
5506 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5507 in VECTOR_CST nodes. */
5508 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5509 if (!CONSTANT_CLASS_P (value))
5511 constant_p = false;
5512 break;
5515 if (constant_p)
5517 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5518 break;
5522 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5523 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5525 break;
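/* For example (roughly): v4si v = { 1, 2, 3, 4 } is simplified to a
   VECTOR_CST, while v = { a, b, c, d } keeps the CONSTRUCTOR; if the
   LHS is not a gimple register, the CONSTRUCTOR is first forced into
   a formal temporary.  */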
5527 default:
5528 /* So how did we get a CONSTRUCTOR for a scalar type? */
5529 gcc_unreachable ();
5532 if (ret == GS_ERROR)
5533 return GS_ERROR;
5534 /* If we have gimplified both sides of the initializer but have
5535 not emitted an assignment, do so now. */
5536 if (*expr_p
5537 /* If the type is an empty type, we don't need to emit the
5538 assignment. */
5539 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5541 tree lhs = TREE_OPERAND (*expr_p, 0);
5542 tree rhs = TREE_OPERAND (*expr_p, 1);
5543 if (want_value && object == lhs)
5544 lhs = unshare_expr (lhs);
5545 gassign *init = gimple_build_assign (lhs, rhs);
5546 gimplify_seq_add_stmt (pre_p, init);
5548 if (want_value)
5550 *expr_p = object;
5551 ret = GS_OK;
5553 else
5555 *expr_p = NULL;
5556 ret = GS_ALL_DONE;
5559 /* If the user requests to initialize automatic variables, we
5560 should initialize the padding inside the variable. Add a call to
5561 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5562 always initialize the padding of the object to zero regardless of
5563 INIT_TYPE. Note, we will not insert this call if the aggregate
5564 variable has been completely cleared already or it's initialized
5565 with an empty constructor. We cannot insert this call if the
5566 variable is a gimple register since __builtin_clear_padding will take
5567 the address of the variable. As a result, if a long double/_Complex long
5568 double variable is spilled into the stack later, its padding cannot
5569 be cleared with __builtin_clear_padding. We should clear its padding
5570 when it is spilled into memory. */
5571 if (is_init_expr
5572 && !is_gimple_reg (object)
5573 && clear_padding_type_may_have_padding_p (type)
5574 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5575 || !AGGREGATE_TYPE_P (type))
5576 && is_var_need_auto_init (object))
5577 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5579 return ret;
5582 /* Given a pointer value OP0, return a simplified version of an
5583 indirection through OP0, or NULL_TREE if no simplification is
5584 possible. This may only be applied to a rhs of an expression.
5585 Note that the resulting type may be different from the type pointed
5586 to in the sense that it is still compatible from the langhooks
5587 point of view. */
5589 static tree
5590 gimple_fold_indirect_ref_rhs (tree t)
5592 return gimple_fold_indirect_ref (t);
5595 /* Subroutine of gimplify_modify_expr to do simplifications of
5596 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5597 something changes. */
5599 static enum gimplify_status
5600 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5601 gimple_seq *pre_p, gimple_seq *post_p,
5602 bool want_value)
5604 enum gimplify_status ret = GS_UNHANDLED;
5605 bool changed;
5609 changed = false;
5610 switch (TREE_CODE (*from_p))
5612 case VAR_DECL:
5613 /* If we're assigning from a read-only variable initialized with
5614 a constructor and not volatile, do the direct assignment from
5615 the constructor, but only if the target is not volatile either
5616 since this latter assignment might end up being done on a per
5617 field basis. However, if the target is volatile and the type
5618 is aggregate and non-addressable, gimplify_init_constructor
5619 knows that it needs to ensure a single access to the target
5620 and it will return GS_OK only in this case. */
5621 if (TREE_READONLY (*from_p)
5622 && DECL_INITIAL (*from_p)
5623 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5624 && !TREE_THIS_VOLATILE (*from_p)
5625 && (!TREE_THIS_VOLATILE (*to_p)
5626 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5627 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5629 tree old_from = *from_p;
5630 enum gimplify_status subret;
5632 /* Move the constructor into the RHS. */
5633 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5635 /* Let's see if gimplify_init_constructor will need to put
5636 it in memory. */
5637 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5638 false, true);
5639 if (subret == GS_ERROR)
5641 /* If so, revert the change. */
5642 *from_p = old_from;
5644 else
5646 ret = GS_OK;
5647 changed = true;
5650 break;
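/* For example (roughly): given

     static const struct S s = { 1, 2 };
     x = s;

   the initializer is substituted so that the assignment is gimplified
   as x = { 1, 2 }, which gimplify_init_constructor can then break
   into per-field stores.  */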
5651 case INDIRECT_REF:
5652 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5653 /* If we have code like
5655 *(const A*)(A*)&x
5657 where the type of "x" is a (possibly cv-qualified variant
5658 of "A"), treat the entire expression as identical to "x".
5659 This kind of code arises in C++ when an object is bound
5660 to a const reference, and if "x" is a TARGET_EXPR we want
5661 to take advantage of the optimization below. But not if
5662 the type is TREE_ADDRESSABLE; then C++17 says that the
5663 TARGET_EXPR needs to be a temporary. */
5664 if (tree t
5665 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5667 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5668 if (TREE_THIS_VOLATILE (t) != volatile_p)
5670 if (DECL_P (t))
5671 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5672 build_fold_addr_expr (t));
5673 if (REFERENCE_CLASS_P (t))
5674 TREE_THIS_VOLATILE (t) = volatile_p;
5676 *from_p = t;
5677 ret = GS_OK;
5678 changed = true;
5680 break;
5682 case TARGET_EXPR:
5684 /* If we are initializing something from a TARGET_EXPR, strip the
5685 TARGET_EXPR and initialize it directly, if possible. This can't
5686 be done if the initializer is void, since that implies that the
5687 temporary is set in some non-trivial way.
5689 ??? What about code that pulls out the temp and uses it
5690 elsewhere? I think that such code never uses the TARGET_EXPR as
5691 an initializer. If I'm wrong, we'll die because the temp won't
5692 have any RTL. In that case, I guess we'll need to replace
5693 references somehow. */
5694 tree init = TARGET_EXPR_INITIAL (*from_p);
5696 if (init
5697 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5698 || !TARGET_EXPR_NO_ELIDE (*from_p))
5699 && !VOID_TYPE_P (TREE_TYPE (init)))
5701 *from_p = init;
5702 ret = GS_OK;
5703 changed = true;
5706 break;
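/* For example (roughly): S x = TARGET_EXPR <D.1, f ()> is shortened
   to S x = f (), eliding the temporary D.1, provided the initializer
   is not void and eliding is permitted.  */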
5708 case COMPOUND_EXPR:
5709 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5710 caught. */
5711 gimplify_compound_expr (from_p, pre_p, true);
5712 ret = GS_OK;
5713 changed = true;
5714 break;
5716 case CONSTRUCTOR:
5717 /* If we already made some changes, let the front end have a
5718 crack at this before we break it down. */
5719 if (ret != GS_UNHANDLED)
5720 break;
5722 /* If we're initializing from a CONSTRUCTOR, break this into
5723 individual MODIFY_EXPRs. */
5724 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5725 false);
5726 return ret;
5728 case COND_EXPR:
5729 /* If we're assigning to a non-register type, push the assignment
5730 down into the branches. This is mandatory for ADDRESSABLE types,
5731 since we cannot generate temporaries for such, but it saves a
5732 copy in other cases as well. */
5733 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5735 /* This code should mirror the code in gimplify_cond_expr. */
5736 enum tree_code code = TREE_CODE (*expr_p);
5737 tree cond = *from_p;
5738 tree result = *to_p;
5740 ret = gimplify_expr (&result, pre_p, post_p,
5741 is_gimple_lvalue, fb_lvalue);
5742 if (ret != GS_ERROR)
5743 ret = GS_OK;
5745 /* If we are going to write RESULT more than once, clear
5746 TREE_READONLY flag, otherwise we might incorrectly promote
5747 the variable to static const and initialize it at compile
5748 time in one of the branches. */
5749 if (VAR_P (result)
5750 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5751 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5752 TREE_READONLY (result) = 0;
5753 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5754 TREE_OPERAND (cond, 1)
5755 = build2 (code, void_type_node, result,
5756 TREE_OPERAND (cond, 1));
5757 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5758 TREE_OPERAND (cond, 2)
5759 = build2 (code, void_type_node, unshare_expr (result),
5760 TREE_OPERAND (cond, 2));
5762 TREE_TYPE (cond) = void_type_node;
5763 recalculate_side_effects (cond);
5765 if (want_value)
5767 gimplify_and_add (cond, pre_p);
5768 *expr_p = unshare_expr (result);
5770 else
5771 *expr_p = cond;
5772 return ret;
5774 break;
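/* For example (roughly): for a non-register type,

     x = b ? y : z;

   is pushed into the branches as

     if (b) x = y; else x = z;

   avoiding a temporary for the selected value.  */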
5776 case CALL_EXPR:
5777 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5778 return slot so that we don't generate a temporary. */
5779 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5780 && aggregate_value_p (*from_p, *from_p))
5782 bool use_target;
5784 if (!(rhs_predicate_for (*to_p))(*from_p))
5785 /* If we need a temporary, *to_p isn't accurate. */
5786 use_target = false;
5787 /* It's OK to use the return slot directly unless it's an NRV. */
5788 else if (TREE_CODE (*to_p) == RESULT_DECL
5789 && DECL_NAME (*to_p) == NULL_TREE
5790 && needs_to_live_in_memory (*to_p))
5791 use_target = true;
5792 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5793 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5794 /* Don't force regs into memory. */
5795 use_target = false;
5796 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5797 /* It's OK to use the target directly if it's being
5798 initialized. */
5799 use_target = true;
5800 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5801 != INTEGER_CST)
5802 /* Always use the target and thus RSO for variable-sized types.
5803 GIMPLE cannot deal with a variable-sized assignment
5804 embedded in a call statement. */
5805 use_target = true;
5806 else if (TREE_CODE (*to_p) != SSA_NAME
5807 && (!is_gimple_variable (*to_p)
5808 || needs_to_live_in_memory (*to_p)))
5809 /* Don't use the original target if it's already addressable;
5810 if its address escapes, and the called function uses the
5811 NRV optimization, a conforming program could see *to_p
5812 change before the called function returns; see c++/19317.
5813 When optimizing, the return_slot pass marks more functions
5814 as safe after we have escape info. */
5815 use_target = false;
5816 else
5817 use_target = true;
5819 if (use_target)
5821 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5822 mark_addressable (*to_p);
5825 break;
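/* For example (roughly): when the return slot can be used, the
   aggregate copy D.1 = f (); x = D.1; is avoided and the call is
   emitted directly as x = f () with CALL_EXPR_RETURN_SLOT_OPT set.  */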
5827 case WITH_SIZE_EXPR:
5828 /* Likewise for calls that return an aggregate of non-constant size,
5829 since we would not be able to generate a temporary at all. */
5830 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5832 *from_p = TREE_OPERAND (*from_p, 0);
5833 /* We don't change ret in this case because the
5834 WITH_SIZE_EXPR might have been added in
5835 gimplify_modify_expr, so returning GS_OK would lead to an
5836 infinite loop. */
5837 changed = true;
5839 break;
5841 /* If we're initializing from a container, push the initialization
5842 inside it. */
5843 case CLEANUP_POINT_EXPR:
5844 case BIND_EXPR:
5845 case STATEMENT_LIST:
5847 tree wrap = *from_p;
5848 tree t;
5850 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5851 fb_lvalue);
5852 if (ret != GS_ERROR)
5853 ret = GS_OK;
5855 t = voidify_wrapper_expr (wrap, *expr_p);
5856 gcc_assert (t == *expr_p);
5858 if (want_value)
5860 gimplify_and_add (wrap, pre_p);
5861 *expr_p = unshare_expr (*to_p);
5863 else
5864 *expr_p = wrap;
5865 return GS_OK;
5868 case NOP_EXPR:
5869 /* Pull out compound literal expressions from a NOP_EXPR.
5870 Those are created in the C FE to drop qualifiers during
5871 lvalue conversion. */
5872 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5873 && tree_ssa_useless_type_conversion (*from_p))
5875 *from_p = TREE_OPERAND (*from_p, 0);
5876 ret = GS_OK;
5877 changed = true;
5879 break;
5881 case COMPOUND_LITERAL_EXPR:
5883 tree complit = TREE_OPERAND (*expr_p, 1);
5884 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5885 tree decl = DECL_EXPR_DECL (decl_s);
5886 tree init = DECL_INITIAL (decl);
5888 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5889 into struct T x = { 0, 1, 2 } if the address of the
5890 compound literal has never been taken. */
5891 if (!TREE_ADDRESSABLE (complit)
5892 && !TREE_ADDRESSABLE (decl)
5893 && init)
5895 *expr_p = copy_node (*expr_p);
5896 TREE_OPERAND (*expr_p, 1) = init;
5897 return GS_OK;
5901 default:
5902 break;
5905 while (changed);
5907 return ret;
5911 /* Return true if T looks like a valid GIMPLE statement. */
5913 static bool
5914 is_gimple_stmt (tree t)
5916 const enum tree_code code = TREE_CODE (t);
5918 switch (code)
5920 case NOP_EXPR:
5921 /* The only valid NOP_EXPR is the empty statement. */
5922 return IS_EMPTY_STMT (t);
5924 case BIND_EXPR:
5925 case COND_EXPR:
5926 /* These are only valid if they're void. */
5927 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5929 case SWITCH_EXPR:
5930 case GOTO_EXPR:
5931 case RETURN_EXPR:
5932 case LABEL_EXPR:
5933 case CASE_LABEL_EXPR:
5934 case TRY_CATCH_EXPR:
5935 case TRY_FINALLY_EXPR:
5936 case EH_FILTER_EXPR:
5937 case CATCH_EXPR:
5938 case ASM_EXPR:
5939 case STATEMENT_LIST:
5940 case OACC_PARALLEL:
5941 case OACC_KERNELS:
5942 case OACC_SERIAL:
5943 case OACC_DATA:
5944 case OACC_HOST_DATA:
5945 case OACC_DECLARE:
5946 case OACC_UPDATE:
5947 case OACC_ENTER_DATA:
5948 case OACC_EXIT_DATA:
5949 case OACC_CACHE:
5950 case OMP_PARALLEL:
5951 case OMP_FOR:
5952 case OMP_SIMD:
5953 case OMP_DISTRIBUTE:
5954 case OMP_LOOP:
5955 case OACC_LOOP:
5956 case OMP_SCAN:
5957 case OMP_SCOPE:
5958 case OMP_SECTIONS:
5959 case OMP_SECTION:
5960 case OMP_SINGLE:
5961 case OMP_MASTER:
5962 case OMP_MASKED:
5963 case OMP_TASKGROUP:
5964 case OMP_ORDERED:
5965 case OMP_CRITICAL:
5966 case OMP_TASK:
5967 case OMP_TARGET:
5968 case OMP_TARGET_DATA:
5969 case OMP_TARGET_UPDATE:
5970 case OMP_TARGET_ENTER_DATA:
5971 case OMP_TARGET_EXIT_DATA:
5972 case OMP_TASKLOOP:
5973 case OMP_TEAMS:
5974 /* These are always void. */
5975 return true;
5977 case CALL_EXPR:
5978 case MODIFY_EXPR:
5979 case PREDICT_EXPR:
5980 /* These are valid regardless of their type. */
5981 return true;
5983 default:
5984 return false;
5989 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5990 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5992 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5993 other, unmodified part of the complex object just before the total store.
5994 As a consequence, if the object is still uninitialized, an undefined value
5995 will be loaded into a register, which may result in a spurious exception
5996 if the register is floating-point and the value happens to be a signaling
5997 NaN for example. Then the fully-fledged complex operations lowering pass
5998 followed by a DCE pass are necessary in order to fix things up. */
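/* For example (roughly): __real__ z = r; is promoted to the total
   store

     D.1 = __imag__ z;
     z = COMPLEX_EXPR <r, D.1>;

   with D.1 a hypothetical temporary holding the unmodified part.  */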
6000 static enum gimplify_status
6001 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
6002 bool want_value)
6004 enum tree_code code, ocode;
6005 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6007 lhs = TREE_OPERAND (*expr_p, 0);
6008 rhs = TREE_OPERAND (*expr_p, 1);
6009 code = TREE_CODE (lhs);
6010 lhs = TREE_OPERAND (lhs, 0);
6012 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6013 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6014 suppress_warning (other);
6015 other = get_formal_tmp_var (other, pre_p);
6017 realpart = code == REALPART_EXPR ? rhs : other;
6018 imagpart = code == REALPART_EXPR ? other : rhs;
6020 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6021 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6022 else
6023 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6025 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6026 *expr_p = (want_value) ? rhs : NULL_TREE;
6028 return GS_ALL_DONE;
6031 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6033 modify_expr
6034 : varname '=' rhs
6035 | '*' ID '=' rhs
6037 PRE_P points to the list where side effects that must happen before
6038 *EXPR_P should be stored.
6040 POST_P points to the list where side effects that must happen after
6041 *EXPR_P should be stored.
6043 WANT_VALUE is nonzero iff we want to use the value of this expression
6044 in another expression. */
6046 static enum gimplify_status
6047 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6048 bool want_value)
6050 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6051 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6052 enum gimplify_status ret = GS_UNHANDLED;
6053 gimple *assign;
6054 location_t loc = EXPR_LOCATION (*expr_p);
6055 gimple_stmt_iterator gsi;
6057 if (error_operand_p (*from_p) || error_operand_p (*to_p))
6058 return GS_ERROR;
6060 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6061 || TREE_CODE (*expr_p) == INIT_EXPR);
6063 /* Trying to simplify a clobber using normal logic doesn't work,
6064 so handle it here. */
6065 if (TREE_CLOBBER_P (*from_p))
6067 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6068 if (ret == GS_ERROR)
6069 return ret;
6070 gcc_assert (!want_value);
6071 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6073 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6074 pre_p, post_p);
6075 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6077 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6078 *expr_p = NULL;
6079 return GS_ALL_DONE;
6082 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6083 memset. */
6084 if (TREE_TYPE (*from_p) != error_mark_node
6085 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6086 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6087 && TREE_CODE (*from_p) == CONSTRUCTOR
6088 && CONSTRUCTOR_NELTS (*from_p) == 0)
6090 maybe_with_size_expr (from_p);
6091 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6092 return gimplify_modify_expr_to_memset (expr_p,
6093 TREE_OPERAND (*from_p, 1),
6094 want_value, pre_p);
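/* For example (roughly): for a variable-length X, the empty
   initializer X = {} is emitted as __builtin_memset (&X, 0, SIZE),
   with SIZE taken from the WITH_SIZE_EXPR wrapped around the
   constructor.  */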
6097 /* Insert pointer conversions required by the middle-end that are not
6098 required by the frontend. This fixes middle-end type checking for,
6099 for example, gcc.dg/redecl-6.c. */
6100 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6102 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6103 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6104 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6107 /* See if any simplifications can be done based on what the RHS is. */
6108 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6109 want_value);
6110 if (ret != GS_UNHANDLED)
6111 return ret;
6113 /* For empty types only gimplify the left hand side and right hand
6114 side as statements and throw away the assignment. Do this after
6115 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6116 types properly. */
6117 if (is_empty_type (TREE_TYPE (*from_p))
6118 && !want_value
6119 /* Don't do this for calls that return addressable types, expand_call
6120 relies on those having a lhs. */
6121 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6122 && TREE_CODE (*from_p) == CALL_EXPR))
6124 gimplify_stmt (from_p, pre_p);
6125 gimplify_stmt (to_p, pre_p);
6126 *expr_p = NULL_TREE;
6127 return GS_ALL_DONE;
6130 /* If the value being copied is of variable width, compute the length
6131 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6132 before gimplifying any of the operands so that we can resolve any
6133 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6134 the size of the expression to be copied, not of the destination, so
6135 that is what we must do here. */
6136 maybe_with_size_expr (from_p);
6138 /* As a special case, we have to temporarily allow for assignments
6139 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6140 a toplevel statement, when gimplifying the GENERIC expression
6141 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6142 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6144 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6145 prevent gimplify_expr from trying to create a new temporary for
6146 foo's LHS, we tell it that it should only gimplify until it
6147 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6148 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6149 and all we need to do here is set 'a' to be its LHS. */
6151 /* Gimplify the RHS first for C++17 and bug 71104. */
6152 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6153 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6154 if (ret == GS_ERROR)
6155 return ret;
6157 /* Then gimplify the LHS. */
6158 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6159 twice we have to make sure to gimplify into non-SSA as otherwise
6160 the abnormal edge added later will make those defs not dominate
6161 their uses.
6162 ??? Technically this applies only to the registers used in the
6163 resulting non-register *TO_P. */
6164 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6165 if (saved_into_ssa
6166 && TREE_CODE (*from_p) == CALL_EXPR
6167 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6168 gimplify_ctxp->into_ssa = false;
6169 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6170 gimplify_ctxp->into_ssa = saved_into_ssa;
6171 if (ret == GS_ERROR)
6172 return ret;
6174 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6175 guess for the predicate was wrong. */
6176 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6177 if (final_pred != initial_pred)
6179 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6180 if (ret == GS_ERROR)
6181 return ret;
6184 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6185 size as argument to the call. */
6186 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6188 tree call = TREE_OPERAND (*from_p, 0);
6189 tree vlasize = TREE_OPERAND (*from_p, 1);
6191 if (TREE_CODE (call) == CALL_EXPR
6192 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6194 int nargs = call_expr_nargs (call);
6195 tree type = TREE_TYPE (call);
6196 tree ap = CALL_EXPR_ARG (call, 0);
6197 tree tag = CALL_EXPR_ARG (call, 1);
6198 tree aptag = CALL_EXPR_ARG (call, 2);
6199 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6200 IFN_VA_ARG, type,
6201 nargs + 1, ap, tag,
6202 aptag, vlasize);
6203 TREE_OPERAND (*from_p, 0) = newcall;
6207 /* Now see if the above changed *from_p to something we handle specially. */
6208 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6209 want_value);
6210 if (ret != GS_UNHANDLED)
6211 return ret;
6213 /* If we've got a variable sized assignment between two lvalues (i.e. does
6214 not involve a call), then we can make things a bit more straightforward
6215 by converting the assignment to memcpy or memset. */
6216 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6218 tree from = TREE_OPERAND (*from_p, 0);
6219 tree size = TREE_OPERAND (*from_p, 1);
6221 if (TREE_CODE (from) == CONSTRUCTOR)
6222 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6224 if (is_gimple_addressable (from))
6226 *from_p = from;
6227 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6228 pre_p);
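/* For example (roughly): a variable-sized X = Y between two lvalues
   is emitted as __builtin_memcpy (&X, &Y, SIZE), with SIZE taken
   from the WITH_SIZE_EXPR.  */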
6232 /* Transform partial stores to non-addressable complex variables into
6233 total stores. This allows us to use real instead of virtual operands
6234 for these variables, which improves optimization. */
6235 if ((TREE_CODE (*to_p) == REALPART_EXPR
6236 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6237 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6238 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6240 /* Try to alleviate the effects of the gimplification creating artificial
6241 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6242 make sure not to create DECL_DEBUG_EXPR links across functions. */
6243 if (!gimplify_ctxp->into_ssa
6244 && VAR_P (*from_p)
6245 && DECL_IGNORED_P (*from_p)
6246 && DECL_P (*to_p)
6247 && !DECL_IGNORED_P (*to_p)
6248 && decl_function_context (*to_p) == current_function_decl
6249 && decl_function_context (*from_p) == current_function_decl)
6251 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6252 DECL_NAME (*from_p)
6253 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6254 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6255 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6258 if (want_value && TREE_THIS_VOLATILE (*to_p))
6259 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6261 if (TREE_CODE (*from_p) == CALL_EXPR)
6263 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6264 instead of a GIMPLE_ASSIGN. */
6265 gcall *call_stmt;
6266 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6268 /* Gimplify internal functions created in the FEs. */
6269 int nargs = call_expr_nargs (*from_p), i;
6270 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6271 auto_vec<tree> vargs (nargs);
6273 for (i = 0; i < nargs; i++)
6275 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6276 EXPR_LOCATION (*from_p));
6277 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6279 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6280 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6281 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6283 else
6285 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6286 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6287 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6288 tree fndecl = get_callee_fndecl (*from_p);
6289 if (fndecl
6290 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6291 && call_expr_nargs (*from_p) == 3)
6292 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6293 CALL_EXPR_ARG (*from_p, 0),
6294 CALL_EXPR_ARG (*from_p, 1),
6295 CALL_EXPR_ARG (*from_p, 2));
6296 else
6298 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6301 notice_special_calls (call_stmt);
6302 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6303 gimple_call_set_lhs (call_stmt, *to_p);
6304 else if (TREE_CODE (*to_p) == SSA_NAME)
6305 /* The above is somewhat premature; avoid ICEing later for an
6306 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6307 ??? This doesn't make it a default-def. */
6308 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6310 assign = call_stmt;
6312 else
6314 assign = gimple_build_assign (*to_p, *from_p);
6315 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6316 if (COMPARISON_CLASS_P (*from_p))
6317 copy_warning (assign, *from_p);
6320 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6322 /* We should have got an SSA name from the start. */
6323 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6324 || ! gimple_in_ssa_p (cfun));
6327 gimplify_seq_add_stmt (pre_p, assign);
6328 gsi = gsi_last (*pre_p);
6329 maybe_fold_stmt (&gsi);
6331 if (want_value)
6333 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6334 return GS_OK;
6336 else
6337 *expr_p = NULL;
6339 return GS_ALL_DONE;
6342 /* Gimplify a comparison between two variable-sized objects. Do this
6343 with a call to BUILT_IN_MEMCMP. */
6345 static enum gimplify_status
6346 gimplify_variable_sized_compare (tree *expr_p)
6348 location_t loc = EXPR_LOCATION (*expr_p);
6349 tree op0 = TREE_OPERAND (*expr_p, 0);
6350 tree op1 = TREE_OPERAND (*expr_p, 1);
6351 tree t, arg, dest, src, expr;
6353 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6354 arg = unshare_expr (arg);
6355 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6356 src = build_fold_addr_expr_loc (loc, op1);
6357 dest = build_fold_addr_expr_loc (loc, op0);
6358 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6359 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6361 expr
6362 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6363 SET_EXPR_LOCATION (expr, loc);
6364 *expr_p = expr;
6366 return GS_OK;
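/* For example (roughly): for two variable-sized objects A and B,
   A == B is rewritten as __builtin_memcmp (&A, &B, SIZE) == 0,
   with SIZE computed from the type of the first operand.  */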
6369 /* Gimplify a comparison between two aggregate objects of integral scalar
6370 mode as a comparison between the bitwise equivalent scalar values. */
6372 static enum gimplify_status
6373 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6375 location_t loc = EXPR_LOCATION (*expr_p);
6376 tree op0 = TREE_OPERAND (*expr_p, 0);
6377 tree op1 = TREE_OPERAND (*expr_p, 1);
6379 tree type = TREE_TYPE (op0);
6380 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6382 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6383 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6385 *expr_p
6386 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6388 return GS_OK;
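/* For example (roughly): if struct S has integral mode SImode,
   s1 == s2 is rewritten as

     VIEW_CONVERT_EXPR <unsigned int> (s1)
       == VIEW_CONVERT_EXPR <unsigned int> (s2);

   comparing the bitwise-equivalent scalar values.  */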
6391 /* Gimplify an expression sequence. This function gimplifies each
6392 expression and rewrites the original expression with the last
6393 expression of the sequence in GIMPLE form.
6395 PRE_P points to the list where the side effects for all the
6396 expressions in the sequence will be emitted.
6398 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6400 static enum gimplify_status
6401 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6403 tree t = *expr_p;
6407 tree *sub_p = &TREE_OPERAND (t, 0);
6409 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6410 gimplify_compound_expr (sub_p, pre_p, false);
6411 else
6412 gimplify_stmt (sub_p, pre_p);
6414 t = TREE_OPERAND (t, 1);
6416 while (TREE_CODE (t) == COMPOUND_EXPR);
6418 *expr_p = t;
6419 if (want_value)
6420 return GS_OK;
6421 else
6423 gimplify_stmt (expr_p, pre_p);
6424 return GS_ALL_DONE;
6428 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6429 gimplify. After gimplification, EXPR_P will point to a new temporary
6430 that holds the original value of the SAVE_EXPR node.
6432 PRE_P points to the list where side effects that must happen before
6433 *EXPR_P should be stored. */
6435 static enum gimplify_status
6436 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6438 enum gimplify_status ret = GS_ALL_DONE;
6439 tree val;
6441 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6442 val = TREE_OPERAND (*expr_p, 0);
6444 if (val && TREE_TYPE (val) == error_mark_node)
6445 return GS_ERROR;
6447 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6448 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6450 /* The operand may be a void-valued expression. It is
6451 being executed only for its side-effects. */
6452 if (TREE_TYPE (val) == void_type_node)
6454 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6455 is_gimple_stmt, fb_none);
6456 val = NULL;
6458 else
6459 /* The temporary may not be an SSA name as later abnormal and EH
6460 control flow may invalidate use/def domination. When in SSA
6461 form then assume there are no such issues and SAVE_EXPRs only
6462 appear via GENERIC foldings. */
6463 val = get_initialized_tmp_var (val, pre_p, post_p,
6464 gimple_in_ssa_p (cfun));
6466 TREE_OPERAND (*expr_p, 0) = val;
6467 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6470 *expr_p = val;
6472 return ret;
6475 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6477 unary_expr
6478 : ...
6479 | '&' varname
6482 PRE_P points to the list where side effects that must happen before
6483 *EXPR_P should be stored.
6485 POST_P points to the list where side effects that must happen after
6486 *EXPR_P should be stored. */
6488 static enum gimplify_status
6489 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6491 tree expr = *expr_p;
6492 tree op0 = TREE_OPERAND (expr, 0);
6493 enum gimplify_status ret;
6494 location_t loc = EXPR_LOCATION (*expr_p);
6496 switch (TREE_CODE (op0))
6498 case INDIRECT_REF:
6499 do_indirect_ref:
6500 /* Check if we are dealing with an expression of the form '&*ptr'.
6501 While the front end folds away '&*ptr' into 'ptr', these
6502 expressions may be generated internally by the compiler (e.g.,
6503 builtins like __builtin_va_end). */
6504 /* Caution: the silent array decomposition semantics we allow for
6505 ADDR_EXPR means we can't always discard the pair. */
6506 /* Gimplification of the ADDR_EXPR operand may drop
6507 cv-qualification conversions, so make sure we add them if
6508 needed. */
6510 tree op00 = TREE_OPERAND (op0, 0);
6511 tree t_expr = TREE_TYPE (expr);
6512 tree t_op00 = TREE_TYPE (op00);
6514 if (!useless_type_conversion_p (t_expr, t_op00))
6515 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6516 *expr_p = op00;
6517 ret = GS_OK;
6519 break;
6521 case VIEW_CONVERT_EXPR:
6522 /* Take the address of our operand and then convert it to the type of
6523 this ADDR_EXPR.
6525 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6526 all clear. The impact of this transformation is even less clear. */
6528 /* If the operand is a useless conversion, look through it. Doing so
6529 guarantees that the ADDR_EXPR and its operand will remain of the
6530 same type. */
6531 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6532 op0 = TREE_OPERAND (op0, 0);
6534 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6535 build_fold_addr_expr_loc (loc,
6536 TREE_OPERAND (op0, 0)));
6537 ret = GS_OK;
6538 break;
6540 case MEM_REF:
6541 if (integer_zerop (TREE_OPERAND (op0, 1)))
6542 goto do_indirect_ref;
6544 /* fall through */
6546 default:
6547 /* If we see a call to a declared builtin or see its address
6548 being taken (we can unify those cases here) then we can mark
6549 the builtin for implicit generation by GCC. */
6550 if (TREE_CODE (op0) == FUNCTION_DECL
6551 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6552 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6553 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6555 /* We use fb_either here because the C frontend sometimes takes
6556 the address of a call that returns a struct; see
6557 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6558 the implied temporary explicit. */
6560 /* Make the operand addressable. */
6561 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6562 is_gimple_addressable, fb_either);
6563 if (ret == GS_ERROR)
6564 break;
6566 /* Then mark it. Beware that it may not be possible to do so directly
6567 if a temporary has been created by the gimplification. */
6568 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6570 op0 = TREE_OPERAND (expr, 0);
6572 /* For various reasons, the gimplification of the expression
6573 may have made a new INDIRECT_REF. */
6574 if (TREE_CODE (op0) == INDIRECT_REF
6575 || (TREE_CODE (op0) == MEM_REF
6576 && integer_zerop (TREE_OPERAND (op0, 1))))
6577 goto do_indirect_ref;
6579 mark_addressable (TREE_OPERAND (expr, 0));
6581 /* The FEs may end up building ADDR_EXPRs early on a decl with
6582 an incomplete type. Re-build ADDR_EXPRs in canonical form
6583 here. */
6584 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6585 *expr_p = build_fold_addr_expr (op0);
6587 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6588 recompute_tree_invariant_for_addr_expr (*expr_p);
6590 /* If we re-built the ADDR_EXPR add a conversion to the original type
6591 if required. */
6592 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6593 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6595 break;
6598 return ret;
6601 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6602 value; output operands should be a gimple lvalue. */
6604 static enum gimplify_status
6605 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6607 tree expr;
6608 int noutputs;
6609 const char **oconstraints;
6610 int i;
6611 tree link;
6612 const char *constraint;
6613 bool allows_mem, allows_reg, is_inout;
6614 enum gimplify_status ret, tret;
6615 gasm *stmt;
6616 vec<tree, va_gc> *inputs;
6617 vec<tree, va_gc> *outputs;
6618 vec<tree, va_gc> *clobbers;
6619 vec<tree, va_gc> *labels;
6620 tree link_next;
6622 expr = *expr_p;
6623 noutputs = list_length (ASM_OUTPUTS (expr));
6624 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6626 inputs = NULL;
6627 outputs = NULL;
6628 clobbers = NULL;
6629 labels = NULL;
6631 ret = GS_ALL_DONE;
6632 link_next = NULL_TREE;
6633 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6635 bool ok;
6636 size_t constraint_len;
6638 link_next = TREE_CHAIN (link);
6640 oconstraints[i]
6641 = constraint
6642 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6643 constraint_len = strlen (constraint);
6644 if (constraint_len == 0)
6645 continue;
6647 ok = parse_output_constraint (&constraint, i, 0, 0,
6648 &allows_mem, &allows_reg, &is_inout);
6649 if (!ok)
6651 ret = GS_ERROR;
6652 is_inout = false;
6655 /* If we can't make copies, we can only accept memory.
6656 Similarly for VLAs. */
6657 tree outtype = TREE_TYPE (TREE_VALUE (link));
6658 if (outtype != error_mark_node
6659 && (TREE_ADDRESSABLE (outtype)
6660 || !COMPLETE_TYPE_P (outtype)
6661 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6663 if (allows_mem)
6664 allows_reg = 0;
6665 else
6667 error ("impossible constraint in %<asm%>");
6668 error ("non-memory output %d must stay in memory", i);
6669 return GS_ERROR;
6673 if (!allows_reg && allows_mem)
6674 mark_addressable (TREE_VALUE (link));
6676 tree orig = TREE_VALUE (link);
6677 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6678 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6679 fb_lvalue | fb_mayfail);
6680 if (tret == GS_ERROR)
6682 if (orig != error_mark_node)
6683 error ("invalid lvalue in %<asm%> output %d", i);
6684 ret = tret;
6687 /* If the constraint does not allow memory make sure we gimplify
6688 it to a register if it is not already but its base is. This
6689 happens for complex and vector components. */
6690 if (!allows_mem)
6692 tree op = TREE_VALUE (link);
6693 if (! is_gimple_val (op)
6694 && is_gimple_reg_type (TREE_TYPE (op))
6695 && is_gimple_reg (get_base_address (op)))
6697 tree tem = create_tmp_reg (TREE_TYPE (op));
6698 tree ass;
6699 if (is_inout)
6701 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6702 tem, unshare_expr (op));
6703 gimplify_and_add (ass, pre_p);
6705 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6706 gimplify_and_add (ass, post_p);
6708 TREE_VALUE (link) = tem;
6709 tret = GS_OK;
6713 vec_safe_push (outputs, link);
6714 TREE_CHAIN (link) = NULL_TREE;
6716 if (is_inout)
6718 /* An input/output operand. To give the optimizers more
6719 flexibility, split it into separate input and output
6720 operands. */
6721 tree input;
6722 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6723 char buf[11];
6725 /* Turn the in/out constraint into an output constraint. */
6726 char *p = xstrdup (constraint);
6727 p[0] = '=';
6728 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6730 /* And add a matching input constraint. */
6731 if (allows_reg)
6733 sprintf (buf, "%u", i);
6735 /* If there are multiple alternatives in the constraint,
6736 handle each of them individually. Those that allow a register
6737 will be replaced with the operand number, the others will stay
6738 unchanged. */
6739 if (strchr (p, ',') != NULL)
6741 size_t len = 0, buflen = strlen (buf);
6742 char *beg, *end, *str, *dst;
6744 for (beg = p + 1;;)
6746 end = strchr (beg, ',');
6747 if (end == NULL)
6748 end = strchr (beg, '\0');
6749 if ((size_t) (end - beg) < buflen)
6750 len += buflen + 1;
6751 else
6752 len += end - beg + 1;
6753 if (*end)
6754 beg = end + 1;
6755 else
6756 break;
6759 str = (char *) alloca (len);
6760 for (beg = p + 1, dst = str;;)
6762 const char *tem;
6763 bool mem_p, reg_p, inout_p;
6765 end = strchr (beg, ',');
6766 if (end)
6767 *end = '\0';
6768 beg[-1] = '=';
6769 tem = beg - 1;
6770 parse_output_constraint (&tem, i, 0, 0,
6771 &mem_p, &reg_p, &inout_p);
6772 if (dst != str)
6773 *dst++ = ',';
6774 if (reg_p)
6776 memcpy (dst, buf, buflen);
6777 dst += buflen;
6779 else
6781 if (end)
6782 len = end - beg;
6783 else
6784 len = strlen (beg);
6785 memcpy (dst, beg, len);
6786 dst += len;
6788 if (end)
6789 beg = end + 1;
6790 else
6791 break;
6793 *dst = '\0';
6794 input = build_string (dst - str, str);
6796 else
6797 input = build_string (strlen (buf), buf);
6799 else
6800 input = build_string (constraint_len - 1, constraint + 1);
6802 free (p);
6804 input = build_tree_list (build_tree_list (NULL_TREE, input),
6805 unshare_expr (TREE_VALUE (link)));
6806 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
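/* For example (roughly): an in/out operand "+r" (x) is split into an
   output "=r" (x) plus a matching input "0" (x), where "0" refers
   back to the output by its operand number.  */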
6810 link_next = NULL_TREE;
6811 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6813 link_next = TREE_CHAIN (link);
6814 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6815 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6816 oconstraints, &allows_mem, &allows_reg);
6818 /* If we can't make copies, we can only accept memory. */
6819 tree intype = TREE_TYPE (TREE_VALUE (link));
6820 if (intype != error_mark_node
6821 && (TREE_ADDRESSABLE (intype)
6822 || !COMPLETE_TYPE_P (intype)
6823 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6825 if (allows_mem)
6826 allows_reg = 0;
6827 else
6829 error ("impossible constraint in %<asm%>");
6830 error ("non-memory input %d must stay in memory", i);
6831 return GS_ERROR;
6835 /* If the operand is a memory input, it should be an lvalue. */
6836 if (!allows_reg && allows_mem)
6838 tree inputv = TREE_VALUE (link);
6839 STRIP_NOPS (inputv);
6840 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6841 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6842 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6843 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6844 || TREE_CODE (inputv) == MODIFY_EXPR)
6845 TREE_VALUE (link) = error_mark_node;
6846 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6847 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6848 if (tret != GS_ERROR)
6850 /* Unlike output operands, memory inputs are not guaranteed
6851 to be lvalues by the FE, and while the expressions are
6852 marked addressable there, if it is e.g. a statement
6853 expression, temporaries in it might not end up being
6854 addressable. They might already be used in the IL and thus
6855 it is too late to make them addressable now. */
6856 tree x = TREE_VALUE (link);
6857 while (handled_component_p (x))
6858 x = TREE_OPERAND (x, 0);
6859 if (TREE_CODE (x) == MEM_REF
6860 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6861 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6862 if ((VAR_P (x)
6863 || TREE_CODE (x) == PARM_DECL
6864 || TREE_CODE (x) == RESULT_DECL)
6865 && !TREE_ADDRESSABLE (x)
6866 && is_gimple_reg (x))
6868 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6869 input_location), 0,
6870 "memory input %d is not directly addressable",
6872 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6875 mark_addressable (TREE_VALUE (link));
6876 if (tret == GS_ERROR)
6878 if (inputv != error_mark_node)
6879 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6880 "memory input %d is not directly addressable", i);
6881 ret = tret;
6884 else
6886 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6887 is_gimple_asm_val, fb_rvalue);
6888 if (tret == GS_ERROR)
6889 ret = tret;
6892 TREE_CHAIN (link) = NULL_TREE;
6893 vec_safe_push (inputs, link);
6896 link_next = NULL_TREE;
6897 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6899 link_next = TREE_CHAIN (link);
6900 TREE_CHAIN (link) = NULL_TREE;
6901 vec_safe_push (clobbers, link);
6904 link_next = NULL_TREE;
6905 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6907 link_next = TREE_CHAIN (link);
6908 TREE_CHAIN (link) = NULL_TREE;
6909 vec_safe_push (labels, link);
6912 /* Do not add ASMs with errors to the gimple IL stream. */
6913 if (ret != GS_ERROR)
6915 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6916 inputs, outputs, clobbers, labels);
6918 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6919 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6920 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6922 gimplify_seq_add_stmt (pre_p, stmt);
6925 return ret;
6928 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6929 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6930 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6931 return to this function.
6933 FIXME should we complexify the prequeue handling instead? Or use flags
6934 for all the cleanups and let the optimizer tighten them up? The current
6935 code seems pretty fragile; it will break on a cleanup within any
6936 non-conditional nesting. But any such nesting would be broken, anyway;
6937 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6938 and continues out of it. We can do that at the RTL level, though, so
6939 having an optimizer to tighten up try/finally regions would be a Good
6940 Thing. */
6942 static enum gimplify_status
6943 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6945 gimple_stmt_iterator iter;
6946 gimple_seq body_sequence = NULL;
6948 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6950 /* We only care about the number of conditions between the innermost
6951 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6952 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6953 int old_conds = gimplify_ctxp->conditions;
6954 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6955 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6956 gimplify_ctxp->conditions = 0;
6957 gimplify_ctxp->conditional_cleanups = NULL;
6958 gimplify_ctxp->in_cleanup_point_expr = true;
6960 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6962 gimplify_ctxp->conditions = old_conds;
6963 gimplify_ctxp->conditional_cleanups = old_cleanups;
6964 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6966 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6968 gimple *wce = gsi_stmt (iter);
6970 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6972 if (gsi_one_before_end_p (iter))
6974 /* Note that gsi_insert_seq_before and gsi_remove do not
6975 scan operands, unlike some other sequence mutators. */
6976 if (!gimple_wce_cleanup_eh_only (wce))
6977 gsi_insert_seq_before_without_update (&iter,
6978 gimple_wce_cleanup (wce),
6979 GSI_SAME_STMT);
6980 gsi_remove (&iter, true);
6981 break;
6983 else
6985 gtry *gtry;
6986 gimple_seq seq;
6987 enum gimple_try_flags kind;
6989 if (gimple_wce_cleanup_eh_only (wce))
6990 kind = GIMPLE_TRY_CATCH;
6991 else
6992 kind = GIMPLE_TRY_FINALLY;
6993 seq = gsi_split_seq_after (iter);
6995 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6996 /* Do not use gsi_replace here, as it may scan operands.
6997 We want to do a simple structural modification only. */
6998 gsi_set_stmt (&iter, gtry);
6999 iter = gsi_start (gtry->eval);
7002 else
7003 gsi_next (&iter);
7006 gimplify_seq_add_seq (pre_p, body_sequence);
7007 if (temp)
7009 *expr_p = temp;
7010 return GS_OK;
7012 else
7014 *expr_p = NULL;
7015 return GS_ALL_DONE;
7019 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7020 is the cleanup action required. EH_ONLY is true if the cleanup should
7021 only be executed if an exception is thrown, not on normal exit.
7022 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7023 only valid for clobbers. */
7025 static void
7026 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7027 bool force_uncond = false)
7029 gimple *wce;
7030 gimple_seq cleanup_stmts = NULL;
7032 /* Errors can result in improperly nested cleanups, which results in
7033 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7034 if (seen_error ())
7035 return;
7037 if (gimple_conditional_context ())
7039 /* If we're in a conditional context, this is more complex. We only
7040 want to run the cleanup if we actually ran the initialization that
7041 necessitates it, but we want to run it after the end of the
7042 conditional context. So we wrap the try/finally around the
7043 condition and use a flag to determine whether or not to actually
7044 run the destructor. Thus
7046 test ? f(A()) : 0
7048 becomes (approximately)
7050 flag = 0;
7051 try {
7052 if (test) { A::A(temp); flag = 1; val = f(temp); }
7053 else { val = 0; }
7054 } finally {
7055 if (flag) A::~A(temp);
7059 if (force_uncond)
7061 gimplify_stmt (&cleanup, &cleanup_stmts);
7062 wce = gimple_build_wce (cleanup_stmts);
7063 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7065 else
7067 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7068 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7069 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7071 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7072 gimplify_stmt (&cleanup, &cleanup_stmts);
7073 wce = gimple_build_wce (cleanup_stmts);
7074 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7076 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7077 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7078 gimplify_seq_add_stmt (pre_p, ftrue);
7080 /* Because of this manipulation, and the EH edges that jump
7081 threading cannot redirect, the temporary (VAR) will appear
7082 to be used uninitialized. Don't warn. */
7083 suppress_warning (var, OPT_Wuninitialized);
7086 else
7088 gimplify_stmt (&cleanup, &cleanup_stmts);
7089 wce = gimple_build_wce (cleanup_stmts);
7090 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7091 gimplify_seq_add_stmt (pre_p, wce);
7095 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7097 static enum gimplify_status
7098 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7100 tree targ = *expr_p;
7101 tree temp = TARGET_EXPR_SLOT (targ);
7102 tree init = TARGET_EXPR_INITIAL (targ);
7103 enum gimplify_status ret;
7105 bool unpoison_empty_seq = false;
7106 gimple_stmt_iterator unpoison_it;
7108 if (init)
7110 gimple_seq init_pre_p = NULL;
7112 /* A TARGET_EXPR temp isn't part of the enclosing block, so add it
7113 to the temps list. Also handle variable-length TARGET_EXPRs. */
7114 if (!poly_int_tree_p (DECL_SIZE (temp)))
7116 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7117 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7118 /* FIXME: this is correct only when the size of the type does
7119 not depend on expressions evaluated in init. */
7120 gimplify_vla_decl (temp, &init_pre_p);
7122 else
7124 /* Save location where we need to place unpoisoning. It's possible
7125 that a variable will be converted to needs_to_live_in_memory. */
7126 unpoison_it = gsi_last (*pre_p);
7127 unpoison_empty_seq = gsi_end_p (unpoison_it);
7129 gimple_add_tmp_var (temp);
7132 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7133 expression is supposed to initialize the slot. */
7134 if (VOID_TYPE_P (TREE_TYPE (init)))
7135 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7136 fb_none);
7137 else
7139 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7140 init = init_expr;
7141 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7142 fb_none);
7143 init = NULL;
7144 ggc_free (init_expr);
7146 if (ret == GS_ERROR)
7148 /* PR c++/28266 Make sure this is expanded only once. */
7149 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7150 return GS_ERROR;
7153 if (init)
7154 gimplify_and_add (init, &init_pre_p);
7156 /* Add a clobber for the temporary going out of scope, like
7157 gimplify_bind_expr. */
7158 if (gimplify_ctxp->in_cleanup_point_expr
7159 && needs_to_live_in_memory (temp))
7161 if (flag_stack_reuse == SR_ALL)
7163 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7164 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7165 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7167 if (asan_poisoned_variables
7168 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7169 && !TREE_STATIC (temp)
7170 && dbg_cnt (asan_use_after_scope)
7171 && !gimplify_omp_ctxp)
7173 tree asan_cleanup = build_asan_poison_call_expr (temp);
7174 if (asan_cleanup)
7176 if (unpoison_empty_seq)
7177 unpoison_it = gsi_start (*pre_p);
7179 asan_poison_variable (temp, false, &unpoison_it,
7180 unpoison_empty_seq);
7181 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7186 gimple_seq_add_seq (pre_p, init_pre_p);
7188 /* If needed, push the cleanup for the temp. */
7189 if (TARGET_EXPR_CLEANUP (targ))
7190 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7191 CLEANUP_EH_ONLY (targ), pre_p);
7193 /* Only expand this once. */
7194 TREE_OPERAND (targ, 3) = init;
7195 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7197 else
7198 /* We should have expanded this before. */
7199 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7201 *expr_p = temp;
7202 return GS_OK;
7205 /* Gimplification of expression trees. */
7207 /* Gimplify an expression which appears at statement context. The
7208 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7209 NULL, a new sequence is allocated.
7211 Return true if we actually added a statement to the queue. */
7213 bool
7214 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7216 gimple_seq_node last;
7218 last = gimple_seq_last (*seq_p);
7219 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7220 return last != gimple_seq_last (*seq_p);
7223 /* Add FIRSTPRIVATE entries for DECL in CTX and in the surrounding
7224 OpenMP parallels. If entries already exist, force them to be some
7225 flavor of private. If there is no enclosing parallel, do nothing. */
7227 void
7228 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7230 splay_tree_node n;
7232 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7233 return;
7237 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7238 if (n != NULL)
7240 if (n->value & GOVD_SHARED)
7241 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7242 else if (n->value & GOVD_MAP)
7243 n->value |= GOVD_MAP_TO_ONLY;
7244 else
7245 return;
7247 else if ((ctx->region_type & ORT_TARGET) != 0)
7249 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7250 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7251 else
7252 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7254 else if (ctx->region_type != ORT_WORKSHARE
7255 && ctx->region_type != ORT_TASKGROUP
7256 && ctx->region_type != ORT_SIMD
7257 && ctx->region_type != ORT_ACC
7258 && !(ctx->region_type & ORT_TARGET_DATA))
7259 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7261 ctx = ctx->outer_context;
7263 while (ctx);
7266 /* Similarly for each of the type sizes of TYPE. */
7268 static void
7269 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7271 if (type == NULL || type == error_mark_node)
7272 return;
7273 type = TYPE_MAIN_VARIANT (type);
7275 if (ctx->privatized_types->add (type))
7276 return;
7278 switch (TREE_CODE (type))
7280 case INTEGER_TYPE:
7281 case ENUMERAL_TYPE:
7282 case BOOLEAN_TYPE:
7283 case REAL_TYPE:
7284 case FIXED_POINT_TYPE:
7285 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7286 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7287 break;
7289 case ARRAY_TYPE:
7290 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7291 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7292 break;
7294 case RECORD_TYPE:
7295 case UNION_TYPE:
7296 case QUAL_UNION_TYPE:
7298 tree field;
7299 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7300 if (TREE_CODE (field) == FIELD_DECL)
7302 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7303 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7306 break;
7308 case POINTER_TYPE:
7309 case REFERENCE_TYPE:
7310 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7311 break;
7313 default:
7314 break;
7317 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7318 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7319 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7322 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7324 static void
7325 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7327 splay_tree_node n;
7328 unsigned int nflags;
7329 tree t;
7331 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7332 return;
7334 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7335 there are constructors involved somewhere. The exception is a shared
7336 clause: nothing is privatized in that case. */
7337 if ((flags & GOVD_SHARED) == 0
7338 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7339 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7340 flags |= GOVD_SEEN;
7342 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7343 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7345 /* We shouldn't be re-adding the decl with the same data
7346 sharing class. */
7347 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7348 nflags = n->value | flags;
7349 /* The only combination of data sharing classes we should see is
7350 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7351 reduction variables to be used in data sharing clauses. */
7352 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7353 || ((nflags & GOVD_DATA_SHARE_CLASS)
7354 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7355 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7356 n->value = nflags;
7357 return;
7360 /* When adding a variable-sized variable, we have to handle all sorts
7361 of additional bits of data: the pointer replacement variable, and
7362 the parameters of the type. */
7363 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7365 /* Add the pointer replacement variable as PRIVATE if the variable
7366 replacement is private, else FIRSTPRIVATE since we'll need the
7367 address of the original variable either for SHARED, or for the
7368 copy into or out of the context. */
7369 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7371 if (flags & GOVD_MAP)
7372 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7373 else if (flags & GOVD_PRIVATE)
7374 nflags = GOVD_PRIVATE;
7375 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7376 && (flags & GOVD_FIRSTPRIVATE))
7377 || (ctx->region_type == ORT_TARGET_DATA
7378 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7379 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7380 else
7381 nflags = GOVD_FIRSTPRIVATE;
7382 nflags |= flags & GOVD_SEEN;
7383 t = DECL_VALUE_EXPR (decl);
7384 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7385 t = TREE_OPERAND (t, 0);
7386 gcc_assert (DECL_P (t));
7387 omp_add_variable (ctx, t, nflags);
7390 /* Add all of the variable and type parameters (which should have
7391 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7392 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7393 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7394 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7396 /* The variable-sized variable itself is never SHARED, only some form
7397 of PRIVATE. The sharing would take place via the pointer variable
7398 which we remapped above. */
7399 if (flags & GOVD_SHARED)
7400 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7401 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7403 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7404 alloca statement we generate for the variable, so make sure it
7405 is available. This isn't automatically needed for the SHARED
7406 case, since we won't be allocating local storage then.
7407 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7408 in this case omp_notice_variable will be called later
7409 on when it is gimplified. */
7410 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7411 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7412 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7414 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7415 && omp_privatize_by_reference (decl))
7417 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7419 /* Similar to the direct variable sized case above, we'll need the
7420 size of references being privatized. */
7421 if ((flags & GOVD_SHARED) == 0)
7423 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7424 if (t && DECL_P (t))
7425 omp_notice_variable (ctx, t, true);
7429 if (n != NULL)
7430 n->value |= flags;
7431 else
7432 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7434 /* For reductions clauses in OpenACC loop directives, by default create a
7435 copy clause on the enclosing parallel construct for carrying back the
7436 results. */
7437 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7439 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7440 while (outer_ctx)
7442 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7443 if (n != NULL)
7445 /* Ignore local variables and explicitly declared clauses. */
7446 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7447 break;
7448 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7450 /* According to the OpenACC spec, such a reduction variable
7451 should already have a copy map on a kernels construct,
7452 verify that here. */
7453 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7454 && (n->value & GOVD_MAP));
7456 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7458 /* Remove firstprivate and make it a copy map. */
7459 n->value &= ~GOVD_FIRSTPRIVATE;
7460 n->value |= GOVD_MAP;
7463 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7465 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7466 GOVD_MAP | GOVD_SEEN);
7467 break;
7469 outer_ctx = outer_ctx->outer_context;
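/* An illustrative example of the OpenACC reduction handling above
   (editorial, not from the source):

     float sum = 0.f;
   #pragma acc parallel
   #pragma acc loop reduction(+:sum)
     for (int i = 0; i < n; i++)
       sum += a[i];

   The loop's reduction on 'sum' inserts an implicit GOVD_MAP entry (a
   copy clause) on the enclosing parallel, so the reduced value is
   carried back out of the region.  */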
7474 /* Notice a threadprivate variable DECL used in OMP context CTX.
7475 This prints out diagnostics about threadprivate variable uses in
7476 untied tasks, target regions and order(concurrent) regions. If
7477 DECL2 is non-NULL, prevent this warning on that variable. */
7479 static bool
7480 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7481 tree decl2)
7483 splay_tree_node n;
7484 struct gimplify_omp_ctx *octx;
7486 for (octx = ctx; octx; octx = octx->outer_context)
7487 if ((octx->region_type & ORT_TARGET) != 0
7488 || octx->order_concurrent)
7490 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7491 if (n == NULL)
7493 if (octx->order_concurrent)
7495 error ("threadprivate variable %qE used in a region with"
7496 " %<order(concurrent)%> clause", DECL_NAME (decl));
7497 inform (octx->location, "enclosing region");
7499 else
7501 error ("threadprivate variable %qE used in target region",
7502 DECL_NAME (decl));
7503 inform (octx->location, "enclosing target region");
7505 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7507 if (decl2)
7508 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7511 if (ctx->region_type != ORT_UNTIED_TASK)
7512 return false;
7513 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7514 if (n == NULL)
7516 error ("threadprivate variable %qE used in untied task",
7517 DECL_NAME (decl));
7518 inform (ctx->location, "enclosing task");
7519 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7521 if (decl2)
7522 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7523 return false;
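/* Illustrative code triggering the diagnostics above (editorial, not
   from the source):

     int tp;
   #pragma omp threadprivate (tp)

     void f (void)
     {
     #pragma omp target
       tp = 1;   // error: threadprivate variable 'tp' used in target region
     #pragma omp task untied
       tp = 2;   // error: threadprivate variable 'tp' used in untied task
     }
*/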
7526 /* Return true if global var DECL is device resident. */
7528 static bool
7529 device_resident_p (tree decl)
7531 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7533 if (!attr)
7534 return false;
7536 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7538 tree c = TREE_VALUE (t);
7539 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7540 return true;
7543 return false;
7546 /* Return true if DECL has an ACC DECLARE attribute. */
7548 static bool
7549 is_oacc_declared (tree decl)
7551 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7552 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7553 return declared != NULL_TREE;
7556 /* Determine outer default flags for DECL mentioned in an OMP region
7557 but not declared in an enclosing clause.
7559 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7560 remapped firstprivate instead of shared. To some extent this is
7561 addressed in omp_firstprivatize_type_sizes, but not
7562 effectively. */
7564 static unsigned
7565 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7566 bool in_code, unsigned flags)
7568 enum omp_clause_default_kind default_kind = ctx->default_kind;
7569 enum omp_clause_default_kind kind;
7571 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7572 if (ctx->region_type & ORT_TASK)
7574 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7576 /* The event-handle specified by a detach clause should always be firstprivate,
7577 regardless of the current default. */
7578 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7579 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7581 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7582 default_kind = kind;
7583 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7584 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7585 /* For C/C++ default({,first}private), variables with static storage duration
7586 declared in a namespace or at global scope and referenced in the construct
7587 must be explicitly specified, i.e. the default acts as default(none). */
7588 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7589 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7590 && VAR_P (decl)
7591 && is_global_var (decl)
7592 && (DECL_FILE_SCOPE_P (decl)
7593 || (DECL_CONTEXT (decl)
7594 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7595 && !lang_GNU_Fortran ())
7596 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7598 switch (default_kind)
7600 case OMP_CLAUSE_DEFAULT_NONE:
7602 const char *rtype;
7604 if (ctx->region_type & ORT_PARALLEL)
7605 rtype = "parallel";
7606 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7607 rtype = "taskloop";
7608 else if (ctx->region_type & ORT_TASK)
7609 rtype = "task";
7610 else if (ctx->region_type & ORT_TEAMS)
7611 rtype = "teams";
7612 else
7613 gcc_unreachable ();
7615 error ("%qE not specified in enclosing %qs",
7616 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7617 inform (ctx->location, "enclosing %qs", rtype);
7619 /* FALLTHRU */
7620 case OMP_CLAUSE_DEFAULT_SHARED:
7621 flags |= GOVD_SHARED;
7622 break;
7623 case OMP_CLAUSE_DEFAULT_PRIVATE:
7624 flags |= GOVD_PRIVATE;
7625 break;
7626 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7627 flags |= GOVD_FIRSTPRIVATE;
7628 break;
7629 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7630 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7631 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7632 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7634 omp_notice_variable (octx, decl, in_code);
7635 for (; octx; octx = octx->outer_context)
7637 splay_tree_node n2;
7639 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7640 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7641 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7642 continue;
7643 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7645 flags |= GOVD_FIRSTPRIVATE;
7646 goto found_outer;
7648 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7650 flags |= GOVD_SHARED;
7651 goto found_outer;
7656 if (TREE_CODE (decl) == PARM_DECL
7657 || (!is_global_var (decl)
7658 && DECL_CONTEXT (decl) == current_function_decl))
7659 flags |= GOVD_FIRSTPRIVATE;
7660 else
7661 flags |= GOVD_SHARED;
7662 found_outer:
7663 break;
7665 default:
7666 gcc_unreachable ();
7669 return flags;
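/* An illustrative example of the default(none) diagnostic above
   (editorial, not from the source):

     int x = 0;
   #pragma omp parallel default(none)
     x++;        // error: 'x' not specified in enclosing 'parallel'

   With default(shared), or with no default clause at all, the same
   reference would instead receive GOVD_SHARED.  */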
7673 /* Determine outer default flags for DECL mentioned in an OACC region
7674 but not declared in an enclosing clause. */
7676 static unsigned
7677 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7679 const char *rkind;
7680 bool on_device = false;
7681 bool is_private = false;
7682 bool declared = is_oacc_declared (decl);
7683 tree type = TREE_TYPE (decl);
7685 if (omp_privatize_by_reference (decl))
7686 type = TREE_TYPE (type);
7688 /* For Fortran COMMON blocks, only the variables used in those blocks are
7689 transferred and remapped. The block itself will have a private clause to
7690 avoid transferring the data twice.
7691 The hook evaluates to false by default. For a variable in a Fortran COMMON
7692 or EQUIVALENCE block it returns true (hence shared=false), as only the
7693 variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7694 the whole block. For C++ and Fortran, it can also be true under certain
7695 other conditions, if DECL_HAS_VALUE_EXPR. */
7696 if (RECORD_OR_UNION_TYPE_P (type))
7697 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7699 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7700 && is_global_var (decl)
7701 && device_resident_p (decl)
7702 && !is_private)
7704 on_device = true;
7705 flags |= GOVD_MAP_TO_ONLY;
7708 switch (ctx->region_type)
7710 case ORT_ACC_KERNELS:
7711 rkind = "kernels";
7713 if (is_private)
7714 flags |= GOVD_FIRSTPRIVATE;
7715 else if (AGGREGATE_TYPE_P (type))
7717 /* Aggregates default to 'present_or_copy', or 'present'. */
7718 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7719 flags |= GOVD_MAP;
7720 else
7721 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7723 else
7724 /* Scalars default to 'copy'. */
7725 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7727 break;
7729 case ORT_ACC_PARALLEL:
7730 case ORT_ACC_SERIAL:
7731 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7733 if (is_private)
7734 flags |= GOVD_FIRSTPRIVATE;
7735 else if (on_device || declared)
7736 flags |= GOVD_MAP;
7737 else if (AGGREGATE_TYPE_P (type))
7739 /* Aggregates default to 'present_or_copy', or 'present'. */
7740 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7741 flags |= GOVD_MAP;
7742 else
7743 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7745 else
7746 /* Scalars default to 'firstprivate'. */
7747 flags |= GOVD_FIRSTPRIVATE;
7749 break;
7751 default:
7752 gcc_unreachable ();
7755 if (DECL_ARTIFICIAL (decl))
7756 ; /* We can get compiler-generated decls, and should not complain
7757 about them. */
7758 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7760 error ("%qE not specified in enclosing OpenACC %qs construct",
7761 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7762 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7764 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7765 ; /* Handled above. */
7766 else
7767 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7769 return flags;
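/* A recap of the implicit OpenACC data attributes chosen above
   (editorial summary, not from the source):

     construct         scalars           aggregates
     kernels           copy (forced)     copy, or present under default(present)
     parallel/serial   firstprivate      copy, or present under default(present)

   'declare'd or device-resident variables are simply mapped, and
   default(none) demands an explicit clause instead.  */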
7772 /* Record the fact that DECL was used within the OMP context CTX.
7773 IN_CODE is true when real code uses DECL, and false when we should
7774 merely emit default(none) errors. Return true if DECL is going to
7775 be remapped and thus DECL shouldn't be gimplified into its
7776 DECL_VALUE_EXPR (if any). */
7778 static bool
7779 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7781 splay_tree_node n;
7782 unsigned flags = in_code ? GOVD_SEEN : 0;
7783 bool ret = false, shared;
7785 if (error_operand_p (decl))
7786 return false;
7788 if (ctx->region_type == ORT_NONE)
7789 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7791 if (is_global_var (decl))
7793 /* Threadprivate variables are predetermined. */
7794 if (DECL_THREAD_LOCAL_P (decl))
7795 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7797 if (DECL_HAS_VALUE_EXPR_P (decl))
7799 if (ctx->region_type & ORT_ACC)
7800 /* For OpenACC, defer expansion of the value to avoid transferring
7801 privatized common block data instead of the im-/explicitly transferred
7802 variables contained in such common blocks. */
7804 else
7806 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7808 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7809 return omp_notice_threadprivate_variable (ctx, decl, value);
7813 if (gimplify_omp_ctxp->outer_context == NULL
7814 && VAR_P (decl)
7815 && oacc_get_fn_attrib (current_function_decl))
7817 location_t loc = DECL_SOURCE_LOCATION (decl);
7819 if (lookup_attribute ("omp declare target link",
7820 DECL_ATTRIBUTES (decl)))
7822 error_at (loc,
7823 "%qE with %<link%> clause used in %<routine%> function",
7824 DECL_NAME (decl));
7825 return false;
7827 else if (!lookup_attribute ("omp declare target",
7828 DECL_ATTRIBUTES (decl)))
7830 error_at (loc,
7831 "%qE requires a %<declare%> directive for use "
7832 "in a %<routine%> function", DECL_NAME (decl));
7833 return false;
7838 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7839 if ((ctx->region_type & ORT_TARGET) != 0)
7841 if (ctx->region_type & ORT_ACC)
7842 /* For OpenACC, as remarked above, defer expansion. */
7843 shared = false;
7844 else
7845 shared = true;
7847 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7848 if (n == NULL)
7850 unsigned nflags = flags;
7851 if ((ctx->region_type & ORT_ACC) == 0)
7853 bool is_declare_target = false;
7854 if (is_global_var (decl)
7855 && varpool_node::get_create (decl)->offloadable)
7857 struct gimplify_omp_ctx *octx;
7858 for (octx = ctx->outer_context;
7859 octx; octx = octx->outer_context)
7861 n = splay_tree_lookup (octx->variables,
7862 (splay_tree_key)decl);
7863 if (n
7864 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7865 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7866 break;
7868 is_declare_target = octx == NULL;
7870 if (!is_declare_target)
7872 int gdmk;
7873 enum omp_clause_defaultmap_kind kind;
7874 if (lang_hooks.decls.omp_allocatable_p (decl))
7875 gdmk = GDMK_ALLOCATABLE;
7876 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7877 gdmk = GDMK_SCALAR_TARGET;
7878 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7879 gdmk = GDMK_SCALAR;
7880 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7881 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7882 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7883 == POINTER_TYPE)))
7884 gdmk = GDMK_POINTER;
7885 else
7886 gdmk = GDMK_AGGREGATE;
7887 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7888 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7890 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7891 nflags |= GOVD_FIRSTPRIVATE;
7892 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7893 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7894 else
7895 gcc_unreachable ();
7897 else if (ctx->defaultmap[gdmk] == 0)
7899 tree d = lang_hooks.decls.omp_report_decl (decl);
7900 error ("%qE not specified in enclosing %<target%>",
7901 DECL_NAME (d));
7902 inform (ctx->location, "enclosing %<target%>");
7904 else if (ctx->defaultmap[gdmk]
7905 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7906 nflags |= ctx->defaultmap[gdmk];
7907 else
7909 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7910 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7915 struct gimplify_omp_ctx *octx = ctx->outer_context;
7916 if ((ctx->region_type & ORT_ACC) && octx)
7918 /* Look in outer OpenACC contexts, to see if there's a
7919 data attribute for this variable. */
7920 omp_notice_variable (octx, decl, in_code);
7922 for (; octx; octx = octx->outer_context)
7924 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7925 break;
7926 splay_tree_node n2
7927 = splay_tree_lookup (octx->variables,
7928 (splay_tree_key) decl);
7929 if (n2)
7931 if (octx->region_type == ORT_ACC_HOST_DATA)
7932 error ("variable %qE declared in enclosing "
7933 "%<host_data%> region", DECL_NAME (decl));
7934 nflags |= GOVD_MAP;
7935 if (octx->region_type == ORT_ACC_DATA
7936 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7937 nflags |= GOVD_MAP_0LEN_ARRAY;
7938 goto found_outer;
7943 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7944 | GOVD_MAP_ALLOC_ONLY)) == flags)
7946 tree type = TREE_TYPE (decl);
7948 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7949 && omp_privatize_by_reference (decl))
7950 type = TREE_TYPE (type);
7951 if (!omp_mappable_type (type))
7953 error ("%qD referenced in target region does not have "
7954 "a mappable type", decl);
7955 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7957 else
7959 if ((ctx->region_type & ORT_ACC) != 0)
7960 nflags = oacc_default_clause (ctx, decl, flags);
7961 else
7962 nflags |= GOVD_MAP;
7965 found_outer:
7966 omp_add_variable (ctx, decl, nflags);
7968 else
7970 /* If nothing changed, there's nothing left to do. */
7971 if ((n->value & flags) == flags)
7972 return ret;
7973 flags |= n->value;
7974 n->value = flags;
7976 goto do_outer;
7979 if (n == NULL)
7981 if (ctx->region_type == ORT_WORKSHARE
7982 || ctx->region_type == ORT_TASKGROUP
7983 || ctx->region_type == ORT_SIMD
7984 || ctx->region_type == ORT_ACC
7985 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7986 goto do_outer;
7988 flags = omp_default_clause (ctx, decl, in_code, flags);
7990 if ((flags & GOVD_PRIVATE)
7991 && lang_hooks.decls.omp_private_outer_ref (decl))
7992 flags |= GOVD_PRIVATE_OUTER_REF;
7994 omp_add_variable (ctx, decl, flags);
7996 shared = (flags & GOVD_SHARED) != 0;
7997 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7998 goto do_outer;
8001 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
8002 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
8003 if (ctx->region_type == ORT_SIMD
8004 && ctx->in_for_exprs
8005 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8006 == GOVD_PRIVATE))
8007 flags &= ~GOVD_SEEN;
8009 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8010 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8011 && DECL_SIZE (decl))
8013 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8015 splay_tree_node n2;
8016 tree t = DECL_VALUE_EXPR (decl);
8017 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
8018 t = TREE_OPERAND (t, 0);
8019 gcc_assert (DECL_P (t));
8020 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8021 n2->value |= GOVD_SEEN;
8023 else if (omp_privatize_by_reference (decl)
8024 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8025 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8026 != INTEGER_CST))
8028 splay_tree_node n2;
8029 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8030 gcc_assert (DECL_P (t));
8031 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8032 if (n2)
8033 omp_notice_variable (ctx, t, true);
8037 if (ctx->region_type & ORT_ACC)
8038 /* For OpenACC, as remarked above, defer expansion. */
8039 shared = false;
8040 else
8041 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8042 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8044 /* If nothing changed, there's nothing left to do. */
8045 if ((n->value & flags) == flags)
8046 return ret;
8047 flags |= n->value;
8048 n->value = flags;
8050 do_outer:
8051 /* If the variable is private in the current context, then we don't
8052 need to propagate anything to an outer context. */
8053 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8054 return ret;
8055 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8056 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8057 return ret;
8058 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8059 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8060 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8061 return ret;
8062 if (ctx->outer_context
8063 && omp_notice_variable (ctx->outer_context, decl, in_code))
8064 return true;
8065 return ret;
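/* An illustrative example of the target defaultmap handling above
   (editorial, not from the source):

     int s, a[32];
   #pragma omp target defaultmap(none)
     s = a[0];   // error: 's' not specified in enclosing 'target'
                 // error: 'a' not specified in enclosing 'target'

   Under the default rules the scalar 's' would be firstprivatized and
   the aggregate 'a' would get an implicit tofrom map.  */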
8068 /* Verify that DECL is private within CTX. If there's specific information
8069 to the contrary in the innermost scope, generate an error. */
8071 static bool
8072 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8074 splay_tree_node n;
8076 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8077 if (n != NULL)
8079 if (n->value & GOVD_SHARED)
8081 if (ctx == gimplify_omp_ctxp)
8083 if (simd)
8084 error ("iteration variable %qE is predetermined linear",
8085 DECL_NAME (decl));
8086 else
8087 error ("iteration variable %qE should be private",
8088 DECL_NAME (decl));
8089 n->value = GOVD_PRIVATE;
8090 return true;
8092 else
8093 return false;
8095 else if ((n->value & GOVD_EXPLICIT) != 0
8096 && (ctx == gimplify_omp_ctxp
8097 || (ctx->region_type == ORT_COMBINED_PARALLEL
8098 && gimplify_omp_ctxp->outer_context == ctx)))
8100 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8101 error ("iteration variable %qE should not be firstprivate",
8102 DECL_NAME (decl));
8103 else if ((n->value & GOVD_REDUCTION) != 0)
8104 error ("iteration variable %qE should not be reduction",
8105 DECL_NAME (decl));
8106 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8107 error ("iteration variable %qE should not be linear",
8108 DECL_NAME (decl));
8110 return (ctx == gimplify_omp_ctxp
8111 || (ctx->region_type == ORT_COMBINED_PARALLEL
8112 && gimplify_omp_ctxp->outer_context == ctx));
8115 if (ctx->region_type != ORT_WORKSHARE
8116 && ctx->region_type != ORT_TASKGROUP
8117 && ctx->region_type != ORT_SIMD
8118 && ctx->region_type != ORT_ACC)
8119 return false;
8120 else if (ctx->outer_context)
8121 return omp_is_private (ctx->outer_context, decl, simd);
8122 return false;
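/* An illustrative example of the iteration-variable checks above
   (editorial, not from the source):

     int i;
   #pragma omp parallel for firstprivate(i)
     for (i = 0; i < n; i++)   // error: iteration variable 'i' should
       ;                       // not be firstprivate
*/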
8125 /* Return true if DECL is private within a parallel region that binds
8126 to the current construct's context, or appears in such a parallel
8127 region's REDUCTION clause. */
8129 static bool
8130 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8132 splay_tree_node n;
8134 do
8136 ctx = ctx->outer_context;
8137 if (ctx == NULL)
8139 if (is_global_var (decl))
8140 return false;
8142 /* References might be private, but they might be shared too.
8143 When checking for copyprivate, assume they might be private;
8144 otherwise assume they might be shared. */
8145 if (copyprivate)
8146 return true;
8148 if (omp_privatize_by_reference (decl))
8149 return false;
8151 /* Treat C++ privatized non-static data members outside
8152 of the privatization the same. */
8153 if (omp_member_access_dummy_var (decl))
8154 return false;
8156 return true;
8159 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8161 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8162 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8164 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8165 || n == NULL
8166 || (n->value & GOVD_MAP) == 0)
8167 continue;
8168 return false;
8171 if (n != NULL)
8173 if ((n->value & GOVD_LOCAL) != 0
8174 && omp_member_access_dummy_var (decl))
8175 return false;
8176 return (n->value & GOVD_SHARED) == 0;
8179 if (ctx->region_type == ORT_WORKSHARE
8180 || ctx->region_type == ORT_TASKGROUP
8181 || ctx->region_type == ORT_SIMD
8182 || ctx->region_type == ORT_ACC)
8183 continue;
8185 break;
8187 while (1);
8188 return false;
8191 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8193 static tree
8194 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8196 tree t = *tp;
8198 /* If this is the DECL_EXPR for DECL, we have found it. */
8199 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8200 return t;
8202 if (IS_TYPE_OR_DECL_P (t))
8203 *walk_subtrees = 0;
8204 return NULL_TREE;
8208 /* Gimplify the affinity clause but effectively ignore it.
8209 Generate:
8210 var = begin;
8211 if ((step > 0) ? var <= end : var > end)
8212 locator_var_expr; */
8214 static void
8215 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8217 tree last_iter = NULL_TREE;
8218 tree last_bind = NULL_TREE;
8219 tree label = NULL_TREE;
8220 tree *last_body = NULL;
8221 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8222 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8224 tree t = OMP_CLAUSE_DECL (c);
8225 if (TREE_CODE (t) == TREE_LIST
8226 && TREE_PURPOSE (t)
8227 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8229 if (TREE_VALUE (t) == null_pointer_node)
8230 continue;
8231 if (TREE_PURPOSE (t) != last_iter)
8233 if (last_bind)
8235 append_to_statement_list (label, last_body);
8236 gimplify_and_add (last_bind, pre_p);
8237 last_bind = NULL_TREE;
8239 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8241 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8242 is_gimple_val, fb_rvalue) == GS_ERROR
8243 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8244 is_gimple_val, fb_rvalue) == GS_ERROR
8245 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8246 is_gimple_val, fb_rvalue) == GS_ERROR
8247 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8248 is_gimple_val, fb_rvalue)
8249 == GS_ERROR))
8250 return;
8252 last_iter = TREE_PURPOSE (t);
8253 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8254 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8255 NULL, block);
8256 last_body = &BIND_EXPR_BODY (last_bind);
8257 tree cond = NULL_TREE;
8258 location_t loc = OMP_CLAUSE_LOCATION (c);
8259 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8261 tree var = TREE_VEC_ELT (it, 0);
8262 tree begin = TREE_VEC_ELT (it, 1);
8263 tree end = TREE_VEC_ELT (it, 2);
8264 tree step = TREE_VEC_ELT (it, 3);
8265 loc = DECL_SOURCE_LOCATION (var);
8266 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8267 var, begin);
8268 append_to_statement_list_force (tem, last_body);
8270 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8271 step, build_zero_cst (TREE_TYPE (step)));
8272 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8273 var, end);
8274 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8275 var, end);
8276 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8277 cond1, cond2, cond3);
8278 if (cond)
8279 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8280 boolean_type_node, cond, cond1);
8281 else
8282 cond = cond1;
8284 tree cont_label = create_artificial_label (loc);
8285 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8286 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8287 void_node,
8288 build_and_jump (&cont_label));
8289 append_to_statement_list_force (tem, last_body);
8291 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8293 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8294 last_body);
8295 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8297 if (error_operand_p (TREE_VALUE (t)))
8298 return;
8299 append_to_statement_list_force (TREE_VALUE (t), last_body);
8300 TREE_VALUE (t) = null_pointer_node;
8302 else
8304 if (last_bind)
8306 append_to_statement_list (label, last_body);
8307 gimplify_and_add (last_bind, pre_p);
8308 last_bind = NULL_TREE;
8310 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8312 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8313 NULL, is_gimple_val, fb_rvalue);
8314 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8316 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8317 return;
8318 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8319 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8320 return;
8321 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8324 if (last_bind)
8326 append_to_statement_list (label, last_body);
8327 gimplify_and_add (last_bind, pre_p);
8329 return;
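/* An illustrative example (editorial, not from the source): for

   #pragma omp task affinity(iterator(j=begin:end:step): a[j])

   the expansion sketched above is roughly

     j = begin;
     if (step > 0 ? j <= end : j > end)   // range non-empty?
       ... evaluate &a[j] for its side effects only ...

   after which the clause is dropped: affinity is merely a hint.  */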
8332 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8333 lower all the depend clauses by populating the corresponding depend
8334 array. Returns 0 if there are no such depend clauses, 2 if all
8335 depend clauses should be removed, and 1 otherwise. */
8337 static int
8338 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8340 tree c;
8341 gimple *g;
8342 size_t n[5] = { 0, 0, 0, 0, 0 };
8343 bool unused[5];
8344 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8345 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8346 size_t i, j;
8347 location_t first_loc = UNKNOWN_LOCATION;
8349 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8352 switch (OMP_CLAUSE_DEPEND_KIND (c))
8354 case OMP_CLAUSE_DEPEND_IN:
8355 i = 2;
8356 break;
8357 case OMP_CLAUSE_DEPEND_OUT:
8358 case OMP_CLAUSE_DEPEND_INOUT:
8359 i = 0;
8360 break;
8361 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8362 i = 1;
8363 break;
8364 case OMP_CLAUSE_DEPEND_DEPOBJ:
8365 i = 3;
8366 break;
8367 case OMP_CLAUSE_DEPEND_INOUTSET:
8368 i = 4;
8369 break;
8370 default:
8371 gcc_unreachable ();
8373 tree t = OMP_CLAUSE_DECL (c);
8374 if (first_loc == UNKNOWN_LOCATION)
8375 first_loc = OMP_CLAUSE_LOCATION (c);
8376 if (TREE_CODE (t) == TREE_LIST
8377 && TREE_PURPOSE (t)
8378 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8380 if (TREE_PURPOSE (t) != last_iter)
8382 tree tcnt = size_one_node;
8383 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8385 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8386 is_gimple_val, fb_rvalue) == GS_ERROR
8387 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8388 is_gimple_val, fb_rvalue) == GS_ERROR
8389 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8390 is_gimple_val, fb_rvalue) == GS_ERROR
8391 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8392 is_gimple_val, fb_rvalue)
8393 == GS_ERROR))
8394 return 2;
8395 tree var = TREE_VEC_ELT (it, 0);
8396 tree begin = TREE_VEC_ELT (it, 1);
8397 tree end = TREE_VEC_ELT (it, 2);
8398 tree step = TREE_VEC_ELT (it, 3);
8399 tree orig_step = TREE_VEC_ELT (it, 4);
8400 tree type = TREE_TYPE (var);
8401 tree stype = TREE_TYPE (step);
8402 location_t loc = DECL_SOURCE_LOCATION (var);
8403 tree endmbegin;
8404 /* Compute count for this iterator as
8405 orig_step > 0
8406 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8407 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8408 and compute product of those for the entire depend
8409 clause. */
8410 if (POINTER_TYPE_P (type))
8411 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8412 stype, end, begin);
8413 else
8414 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8415 end, begin);
8416 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8417 step,
8418 build_int_cst (stype, 1));
8419 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8420 build_int_cst (stype, 1));
8421 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8422 unshare_expr (endmbegin),
8423 stepm1);
8424 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8425 pos, step);
8426 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8427 endmbegin, stepp1);
8428 if (TYPE_UNSIGNED (stype))
8430 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8431 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8433 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8434 neg, step);
8435 step = NULL_TREE;
8436 tree cond = fold_build2_loc (loc, LT_EXPR,
8437 boolean_type_node,
8438 begin, end);
8439 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8440 build_int_cst (stype, 0));
8441 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8442 end, begin);
8443 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8444 build_int_cst (stype, 0));
8445 tree osteptype = TREE_TYPE (orig_step);
8446 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8447 orig_step,
8448 build_int_cst (osteptype, 0));
8449 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8450 cond, pos, neg);
8451 cnt = fold_convert_loc (loc, sizetype, cnt);
8452 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8453 fb_rvalue) == GS_ERROR)
8454 return 2;
8455 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8457 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8458 fb_rvalue) == GS_ERROR)
8459 return 2;
8460 last_iter = TREE_PURPOSE (t);
8461 last_count = tcnt;
8463 if (counts[i] == NULL_TREE)
8464 counts[i] = last_count;
8465 else
8466 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8467 PLUS_EXPR, counts[i], last_count);
8469 else
8470 n[i]++;
8472 for (i = 0; i < 5; i++)
8473 if (counts[i])
8474 break;
8475 if (i == 5)
8476 return 0;
8478 tree total = size_zero_node;
8479 for (i = 0; i < 5; i++)
8481 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8482 if (counts[i] == NULL_TREE)
8483 counts[i] = size_zero_node;
8484 if (n[i])
8485 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8486 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8487 fb_rvalue) == GS_ERROR)
8488 return 2;
8489 total = size_binop (PLUS_EXPR, total, counts[i]);
8492 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8493 == GS_ERROR)
8494 return 2;
8495 bool is_old = unused[1] && unused[3] && unused[4];
8496 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8497 size_int (is_old ? 1 : 4));
8498 if (!unused[4])
8499 totalpx = size_binop (PLUS_EXPR, totalpx,
8500 size_binop (MULT_EXPR, counts[4], size_int (2)));
8501 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8502 tree array = create_tmp_var_raw (type);
8503 TREE_ADDRESSABLE (array) = 1;
8504 if (!poly_int_tree_p (totalpx))
8506 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8507 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8508 if (gimplify_omp_ctxp)
8510 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8511 while (ctx
8512 && (ctx->region_type == ORT_WORKSHARE
8513 || ctx->region_type == ORT_TASKGROUP
8514 || ctx->region_type == ORT_SIMD
8515 || ctx->region_type == ORT_ACC))
8516 ctx = ctx->outer_context;
8517 if (ctx)
8518 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8520 gimplify_vla_decl (array, pre_p);
8522 else
8523 gimple_add_tmp_var (array);
8524 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8525 NULL_TREE);
8526 tree tem;
8527 if (!is_old)
8529 tem = build2 (MODIFY_EXPR, void_type_node, r,
8530 build_int_cst (ptr_type_node, 0));
8531 gimplify_and_add (tem, pre_p);
8532 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8533 NULL_TREE);
8535 tem = build2 (MODIFY_EXPR, void_type_node, r,
8536 fold_convert (ptr_type_node, total));
8537 gimplify_and_add (tem, pre_p);
8538 for (i = 1; i < (is_old ? 2 : 4); i++)
8540 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8541 NULL_TREE, NULL_TREE);
8542 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8543 gimplify_and_add (tem, pre_p);
8546 tree cnts[6];
8547 for (j = 5; j; j--)
8548 if (!unused[j - 1])
8549 break;
8550 for (i = 0; i < 5; i++)
8552 if (i && (i >= j || unused[i - 1]))
8554 cnts[i] = cnts[i - 1];
8555 continue;
8557 cnts[i] = create_tmp_var (sizetype);
8558 if (i == 0)
8559 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8560 else
8562 tree t;
8563 if (is_old)
8564 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8565 else
8566 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8567 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8568 == GS_ERROR)
8569 return 2;
8570 g = gimple_build_assign (cnts[i], t);
8572 gimple_seq_add_stmt (pre_p, g);
8574 if (unused[4])
8575 cnts[5] = NULL_TREE;
8576 else
8578 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8579 cnts[5] = create_tmp_var (sizetype);
8580 g = gimple_build_assign (cnts[5], t);
8581 gimple_seq_add_stmt (pre_p, g);
8584 last_iter = NULL_TREE;
8585 tree last_bind = NULL_TREE;
8586 tree *last_body = NULL;
8587 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8588 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8590 switch (OMP_CLAUSE_DEPEND_KIND (c))
8592 case OMP_CLAUSE_DEPEND_IN:
8593 i = 2;
8594 break;
8595 case OMP_CLAUSE_DEPEND_OUT:
8596 case OMP_CLAUSE_DEPEND_INOUT:
8597 i = 0;
8598 break;
8599 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8600 i = 1;
8601 break;
8602 case OMP_CLAUSE_DEPEND_DEPOBJ:
8603 i = 3;
8604 break;
8605 case OMP_CLAUSE_DEPEND_INOUTSET:
8606 i = 4;
8607 break;
8608 default:
8609 gcc_unreachable ();
8611 tree t = OMP_CLAUSE_DECL (c);
8612 if (TREE_CODE (t) == TREE_LIST
8613 && TREE_PURPOSE (t)
8614 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8616 if (TREE_PURPOSE (t) != last_iter)
8618 if (last_bind)
8619 gimplify_and_add (last_bind, pre_p);
8620 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8621 last_bind = build3 (BIND_EXPR, void_type_node,
8622 BLOCK_VARS (block), NULL, block);
8623 TREE_SIDE_EFFECTS (last_bind) = 1;
8624 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8625 tree *p = &BIND_EXPR_BODY (last_bind);
8626 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8628 tree var = TREE_VEC_ELT (it, 0);
8629 tree begin = TREE_VEC_ELT (it, 1);
8630 tree end = TREE_VEC_ELT (it, 2);
8631 tree step = TREE_VEC_ELT (it, 3);
8632 tree orig_step = TREE_VEC_ELT (it, 4);
8633 tree type = TREE_TYPE (var);
8634 location_t loc = DECL_SOURCE_LOCATION (var);
8635 /* Emit:
8636 var = begin;
8637 goto cond_label;
8638 beg_label:
8639 ...
8640 var = var + step;
8641 cond_label:
8642 if (orig_step > 0) {
8643 if (var < end) goto beg_label;
8644 } else {
8645 if (var > end) goto beg_label;
8646 }
8647 for each iterator, with inner iterators added to
8648 the ... above. */
8649 tree beg_label = create_artificial_label (loc);
8650 tree cond_label = NULL_TREE;
8651 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8652 var, begin);
8653 append_to_statement_list_force (tem, p);
8654 tem = build_and_jump (&cond_label);
8655 append_to_statement_list_force (tem, p);
8656 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8657 append_to_statement_list (tem, p);
8658 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8659 NULL_TREE, NULL_TREE);
8660 TREE_SIDE_EFFECTS (bind) = 1;
8661 SET_EXPR_LOCATION (bind, loc);
8662 append_to_statement_list_force (bind, p);
8663 if (POINTER_TYPE_P (type))
8664 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8665 var, fold_convert_loc (loc, sizetype,
8666 step));
8667 else
8668 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8669 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8670 var, tem);
8671 append_to_statement_list_force (tem, p);
8672 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8673 append_to_statement_list (tem, p);
8674 tree cond = fold_build2_loc (loc, LT_EXPR,
8675 boolean_type_node,
8676 var, end);
8677 tree pos
8678 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8679 cond, build_and_jump (&beg_label),
8680 void_node);
8681 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8682 var, end);
8683 tree neg
8684 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8685 cond, build_and_jump (&beg_label),
8686 void_node);
8687 tree osteptype = TREE_TYPE (orig_step);
8688 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8689 orig_step,
8690 build_int_cst (osteptype, 0));
8691 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8692 cond, pos, neg);
8693 append_to_statement_list_force (tem, p);
8694 p = &BIND_EXPR_BODY (bind);
8696 last_body = p;
8698 last_iter = TREE_PURPOSE (t);
8699 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8701 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8702 0), last_body);
8703 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8705 if (error_operand_p (TREE_VALUE (t)))
8706 return 2;
8707 if (TREE_VALUE (t) != null_pointer_node)
8708 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8709 if (i == 4)
8711 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8712 NULL_TREE, NULL_TREE);
8713 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8714 NULL_TREE, NULL_TREE);
8715 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8716 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8717 void_type_node, r, r2);
8718 append_to_statement_list_force (tem, last_body);
8719 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8720 void_type_node, cnts[i],
8721 size_binop (PLUS_EXPR, cnts[i],
8722 size_int (1)));
8723 append_to_statement_list_force (tem, last_body);
8724 i = 5;
8726 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8727 NULL_TREE, NULL_TREE);
8728 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8729 void_type_node, r, TREE_VALUE (t));
8730 append_to_statement_list_force (tem, last_body);
8731 if (i == 5)
8733 r = build4 (ARRAY_REF, ptr_type_node, array,
8734 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8735 NULL_TREE, NULL_TREE);
8736 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8737 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8738 void_type_node, r, tem);
8739 append_to_statement_list_force (tem, last_body);
8741 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8742 void_type_node, cnts[i],
8743 size_binop (PLUS_EXPR, cnts[i],
8744 size_int (1 + (i == 5))));
8745 append_to_statement_list_force (tem, last_body);
8746 TREE_VALUE (t) = null_pointer_node;
8748 else
8750 if (last_bind)
8752 gimplify_and_add (last_bind, pre_p);
8753 last_bind = NULL_TREE;
8755 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8757 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8758 NULL, is_gimple_val, fb_rvalue);
8759 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8761 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8762 return 2;
8763 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8764 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8765 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8766 is_gimple_val, fb_rvalue) == GS_ERROR)
8767 return 2;
8768 if (i == 4)
8770 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8771 NULL_TREE, NULL_TREE);
8772 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8773 NULL_TREE, NULL_TREE);
8774 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8775 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8776 gimplify_and_add (tem, pre_p);
8777 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8778 cnts[i],
8779 size_int (1)));
8780 gimple_seq_add_stmt (pre_p, g);
8781 i = 5;
8783 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8784 NULL_TREE, NULL_TREE);
8785 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8786 gimplify_and_add (tem, pre_p);
8787 if (i == 5)
8789 r = build4 (ARRAY_REF, ptr_type_node, array,
8790 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8791 NULL_TREE, NULL_TREE);
8792 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8793 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8795 gimplify_and_add (tem, pre_p);
8797 g = gimple_build_assign (cnts[i],
8798 size_binop (PLUS_EXPR, cnts[i],
8799 size_int (1 + (i == 5))));
8800 gimple_seq_add_stmt (pre_p, g);
8803 if (last_bind)
8804 gimplify_and_add (last_bind, pre_p);
8805 tree cond = boolean_false_node;
8806 if (is_old)
8808 if (!unused[0])
8809 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8810 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8811 size_int (2)));
8812 if (!unused[2])
8813 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8814 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8815 cnts[2],
8816 size_binop_loc (first_loc, PLUS_EXPR,
8817 totalpx,
8818 size_int (1))));
8820 else
8822 tree prev = size_int (5);
8823 for (i = 0; i < 5; i++)
8825 if (unused[i])
8826 continue;
8827 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8828 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8829 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8830 cnts[i], unshare_expr (prev)));
8833 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8834 build_call_expr_loc (first_loc,
8835 builtin_decl_explicit (BUILT_IN_TRAP),
8836 0), void_node);
8837 gimplify_and_add (tem, pre_p);
8838 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8839 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8840 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8841 OMP_CLAUSE_CHAIN (c) = *list_p;
8842 *list_p = c;
8843 return 1;
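/* Layout of the generated array (editorial sketch, not from the source):
   in the old format (only in/out/inout kinds present)

     D[0] = N (total number of depend addresses)
     D[1] = number of out/inout entries
     D[2...] = the addresses, out/inout first, then in

   and in the newer format

     D[0] = 0, D[1] = N, D[2..4] = per-kind counts,
     D[5...] = the addresses, with inoutset entries using two slots.

   A single clause of kind OMP_CLAUSE_DEPEND_LAST pointing at the array
   then replaces the lowered clauses.  */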
8846 /* For a set of mappings describing an array section pointed to by a struct
8847 (or derived type, etc.) component, create an "alloc" or "release" node to
8848 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8849 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8850 be created that is inserted into the list of mapping nodes attached to the
8851 directive being processed -- not part of the sorted list of nodes after
8852 GOMP_MAP_STRUCT.
8854 CODE is the code of the directive being processed. GRP_START and GRP_END
8855 are the first and last of two or three nodes representing this array section
8856 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8857 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8858 filled with the additional node described above, if needed.
8860 This function does not add the new nodes to any lists itself. It is the
8861 responsibility of the caller to do that. */
8863 static tree
8864 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8865 tree *extra_node)
8867 enum gomp_map_kind mkind
8868 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8869 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8871 gcc_assert (grp_start != grp_end);
8873 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8874 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8875 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8876 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8877 tree grp_mid = NULL_TREE;
8878 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8879 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8881 if (grp_mid
8882 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8883 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8884 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8885 else
8886 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8888 if (grp_mid
8889 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8890 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
8891 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
8893 tree c3
8894 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8895 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8896 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
8897 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8898 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
8900 *extra_node = c3;
8902 else
8903 *extra_node = NULL_TREE;
8905 return c2;
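/* An illustrative example (editorial, not from the source): for

     struct S { int *p; } s;
   #pragma omp target enter data map(to: s.p[0:16])

   the array-section group for 's.p[0:16]' gets a companion node built
   here -- GOMP_MAP_ALLOC of the pointer component 's.p' (GOMP_MAP_RELEASE
   for exit data) -- to be placed after the enclosing GOMP_MAP_STRUCT
   node.  */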
8908 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8909 set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the
8910 access, and return the containing object. */
8915 static tree
8916 extract_base_bit_offset (tree base, poly_int64 *bitposp,
8917 poly_offset_int *poffsetp)
8919 tree offset;
8920 poly_int64 bitsize, bitpos;
8921 machine_mode mode;
8922 int unsignedp, reversep, volatilep = 0;
8923 poly_offset_int poffset;
8925 STRIP_NOPS (base);
8927 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8928 &unsignedp, &reversep, &volatilep);
8930 STRIP_NOPS (base);
8932 if (offset && poly_int_tree_p (offset))
8934 poffset = wi::to_poly_offset (offset);
8935 offset = NULL_TREE;
8937 else
8938 poffset = 0;
8940 if (maybe_ne (bitpos, 0))
8941 poffset += bits_to_bytes_round_down (bitpos);
8943 *bitposp = bitpos;
8944 *poffsetp = poffset;
8946 return base;
8949 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8950 started processing the group yet. The TEMPORARY mark is used when we first
8951 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8952 when we have processed all the group's children (i.e. all the base pointers
8953 referred to by the group's mapping nodes, recursively). */
8955 enum omp_tsort_mark {
8956 UNVISITED,
8957 TEMPORARY,
8958 PERMANENT
8961 /* Hash for trees based on operand_equal_p. Like tree_operand_hash
8962 but ignores side effects in the equality comparisons. */
8964 struct tree_operand_hash_no_se : tree_operand_hash
8966 static inline bool equal (const value_type &,
8967 const compare_type &);
8970 inline bool
8971 tree_operand_hash_no_se::equal (const value_type &t1,
8972 const compare_type &t2)
8974 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
8977 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
8978 clause. */
8980 struct omp_mapping_group {
8981 tree *grp_start;
8982 tree grp_end;
8983 omp_tsort_mark mark;
8984 /* If we've removed the group but need to reindex, mark the group as
8985 deleted. */
8986 bool deleted;
8987 struct omp_mapping_group *sibling;
8988 struct omp_mapping_group *next;
8991 DEBUG_FUNCTION void
8992 debug_mapping_group (omp_mapping_group *grp)
8994 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
8995 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
8996 debug_generic_expr (*grp->grp_start);
8997 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9000 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9001 isn't one. */
9003 static tree
9004 omp_get_base_pointer (tree expr)
9006 while (TREE_CODE (expr) == ARRAY_REF
9007 || TREE_CODE (expr) == COMPONENT_REF)
9008 expr = TREE_OPERAND (expr, 0);
9010 if (TREE_CODE (expr) == INDIRECT_REF
9011 || (TREE_CODE (expr) == MEM_REF
9012 && integer_zerop (TREE_OPERAND (expr, 1))))
9014 expr = TREE_OPERAND (expr, 0);
9015 while (TREE_CODE (expr) == COMPOUND_EXPR)
9016 expr = TREE_OPERAND (expr, 1);
9017 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9018 expr = TREE_OPERAND (expr, 0);
9019 if (TREE_CODE (expr) == SAVE_EXPR)
9020 expr = TREE_OPERAND (expr, 0);
9021 STRIP_NOPS (expr);
9022 return expr;
9025 return NULL_TREE;
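/* Illustrative results (editorial, not from the source): for 'p->a[3].f'
   the base pointer is 'p'; for 's.a[3].f' with 's' a plain aggregate
   there is no base pointer and NULL_TREE is returned.  */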
9028 /* Remove COMPONENT_REFS and indirections from EXPR. */
9030 static tree
9031 omp_strip_components_and_deref (tree expr)
9033 while (TREE_CODE (expr) == COMPONENT_REF
9034 || TREE_CODE (expr) == INDIRECT_REF
9035 || (TREE_CODE (expr) == MEM_REF
9036 && integer_zerop (TREE_OPERAND (expr, 1)))
9037 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9038 || TREE_CODE (expr) == COMPOUND_EXPR)
9039 if (TREE_CODE (expr) == COMPOUND_EXPR)
9040 expr = TREE_OPERAND (expr, 1);
9041 else
9042 expr = TREE_OPERAND (expr, 0);
9044 STRIP_NOPS (expr);
9046 return expr;
9049 static tree
9050 omp_strip_indirections (tree expr)
9052 while (TREE_CODE (expr) == INDIRECT_REF
9053 || (TREE_CODE (expr) == MEM_REF
9054 && integer_zerop (TREE_OPERAND (expr, 1))))
9055 expr = TREE_OPERAND (expr, 0);
9057 return expr;
9060 /* An attach or detach operation depends directly on the address being
9061 attached/detached. Return that address, or none if there are no
9062 attachments/detachments. */
9064 static tree
9065 omp_get_attachment (omp_mapping_group *grp)
9067 tree node = *grp->grp_start;
9069 switch (OMP_CLAUSE_MAP_KIND (node))
9071 case GOMP_MAP_TO:
9072 case GOMP_MAP_FROM:
9073 case GOMP_MAP_TOFROM:
9074 case GOMP_MAP_ALWAYS_FROM:
9075 case GOMP_MAP_ALWAYS_TO:
9076 case GOMP_MAP_ALWAYS_TOFROM:
9077 case GOMP_MAP_FORCE_FROM:
9078 case GOMP_MAP_FORCE_TO:
9079 case GOMP_MAP_FORCE_TOFROM:
9080 case GOMP_MAP_FORCE_PRESENT:
9081 case GOMP_MAP_ALLOC:
9082 case GOMP_MAP_RELEASE:
9083 case GOMP_MAP_DELETE:
9084 case GOMP_MAP_FORCE_ALLOC:
9085 if (node == grp->grp_end)
9086 return NULL_TREE;
9088 node = OMP_CLAUSE_CHAIN (node);
9089 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9091 gcc_assert (node != grp->grp_end);
9092 node = OMP_CLAUSE_CHAIN (node);
9094 if (node)
9095 switch (OMP_CLAUSE_MAP_KIND (node))
9097 case GOMP_MAP_POINTER:
9098 case GOMP_MAP_ALWAYS_POINTER:
9099 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9100 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9101 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9102 return NULL_TREE;
9104 case GOMP_MAP_ATTACH_DETACH:
9105 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9106 return OMP_CLAUSE_DECL (node);
9108 default:
9109 internal_error ("unexpected mapping node");
9111 return error_mark_node;
9113 case GOMP_MAP_TO_PSET:
9114 gcc_assert (node != grp->grp_end);
9115 node = OMP_CLAUSE_CHAIN (node);
9116 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9117 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9118 return OMP_CLAUSE_DECL (node);
9119 else
9120 internal_error ("unexpected mapping node");
9121 return error_mark_node;
9123 case GOMP_MAP_ATTACH:
9124 case GOMP_MAP_DETACH:
9125 node = OMP_CLAUSE_CHAIN (node);
9126 if (!node || *grp->grp_start == grp->grp_end)
9127 return OMP_CLAUSE_DECL (*grp->grp_start);
9128 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9129 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9130 return OMP_CLAUSE_DECL (*grp->grp_start);
9131 else
9132 internal_error ("unexpected mapping node");
9133 return error_mark_node;
9135 case GOMP_MAP_STRUCT:
9136 case GOMP_MAP_FORCE_DEVICEPTR:
9137 case GOMP_MAP_DEVICE_RESIDENT:
9138 case GOMP_MAP_LINK:
9139 case GOMP_MAP_IF_PRESENT:
9140 case GOMP_MAP_FIRSTPRIVATE:
9141 case GOMP_MAP_FIRSTPRIVATE_INT:
9142 case GOMP_MAP_USE_DEVICE_PTR:
9143 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9144 return NULL_TREE;
9146 default:
9147 internal_error ("unexpected mapping node");
9150 return error_mark_node;
9153 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9154 mappings, return the chain pointer to the end of that group in the list. */
9156 static tree *
9157 omp_group_last (tree *start_p)
9159 tree c = *start_p, nc, *grp_last_p = start_p;
9161 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9163 nc = OMP_CLAUSE_CHAIN (c);
9165 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9166 return grp_last_p;
9168 switch (OMP_CLAUSE_MAP_KIND (c))
9170 default:
9171 while (nc
9172 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9173 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9174 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9175 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9176 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9177 || (OMP_CLAUSE_MAP_KIND (nc)
9178 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9179 || (OMP_CLAUSE_MAP_KIND (nc)
9180 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9181 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9182 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9184 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9185 c = nc;
9186 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9187 if (nc2
9188 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9189 && (OMP_CLAUSE_MAP_KIND (nc)
9190 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9191 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9193 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9194 c = nc2;
9195 nc2 = OMP_CLAUSE_CHAIN (nc2);
9197 nc = nc2;
9199 break;
9201 case GOMP_MAP_ATTACH:
9202 case GOMP_MAP_DETACH:
9203 /* This is a weird artifact of how directives are parsed: bare attach or
9204 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9205 FIRSTPRIVATE_REFERENCE node. FIXME. */
9206 if (nc
9207 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9208 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9209 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9210 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9211 break;
9213 case GOMP_MAP_TO_PSET:
9214 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9215 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9216 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9217 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9218 break;
9220 case GOMP_MAP_STRUCT:
9222 unsigned HOST_WIDE_INT num_mappings
9223 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9224 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9225 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9226 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9227 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9228 for (unsigned i = 0; i < num_mappings; i++)
9229 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9231 break;
9234 return grp_last_p;
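/* E.g. for the (illustrative) clause list

     GOMP_MAP_TO (a) GOMP_MAP_ALWAYS_POINTER (a.ptr) GOMP_MAP_FROM (b)

   omp_group_last called on the head returns the address of the TO node's
   chain pointer: the TO node and its pointer companion form one group
   whose last node is the ALWAYS_POINTER node, and GOMP_MAP_FROM (b)
   starts the next group. */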
9237 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9238 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9239 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9240 if we have at least one such group, else return NULL. */
9242 static void
9243 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9244 tree gather_sentinel)
9246 for (tree *cp = list_p;
9247 *cp && *cp != gather_sentinel;
9248 cp = &OMP_CLAUSE_CHAIN (*cp))
9250 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9251 continue;
9253 tree *grp_last_p = omp_group_last (cp);
9254 omp_mapping_group grp;
9256 grp.grp_start = cp;
9257 grp.grp_end = *grp_last_p;
9258 grp.mark = UNVISITED;
9259 grp.sibling = NULL;
9260 grp.deleted = false;
9261 grp.next = NULL;
9262 groups->safe_push (grp);
9264 cp = grp_last_p;
9268 static vec<omp_mapping_group> *
9269 omp_gather_mapping_groups (tree *list_p)
9271 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9273 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9275 if (groups->length () > 0)
9276 return groups;
9277 else
9279 delete groups;
9280 return NULL;
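/* A sketch of the usual lifecycle of these structures (the callers in
   gimplify_scan_omp_clauses below follow this pattern):

     vec<omp_mapping_group> *groups = omp_gather_mapping_groups (list_p);
     if (groups)
       {
         hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
           = omp_index_mapping_groups (groups);
         ...use or reorder the groups...
         delete grpmap;
         delete groups;
       }
*/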
9284 /* A pointer mapping group GRP may define a block of memory starting at some
9285 base address, and maybe also define a firstprivate pointer or firstprivate
9286 reference that points to that block. The return value is a node containing
9287 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9288 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9289 return the number of consecutive chained nodes in CHAINED. */
9291 static tree
9292 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9293 tree *firstprivate)
9295 tree node = *grp->grp_start;
9297 *firstprivate = NULL_TREE;
9298 *chained = 1;
9300 switch (OMP_CLAUSE_MAP_KIND (node))
9302 case GOMP_MAP_TO:
9303 case GOMP_MAP_FROM:
9304 case GOMP_MAP_TOFROM:
9305 case GOMP_MAP_ALWAYS_FROM:
9306 case GOMP_MAP_ALWAYS_TO:
9307 case GOMP_MAP_ALWAYS_TOFROM:
9308 case GOMP_MAP_FORCE_FROM:
9309 case GOMP_MAP_FORCE_TO:
9310 case GOMP_MAP_FORCE_TOFROM:
9311 case GOMP_MAP_FORCE_PRESENT:
9312 case GOMP_MAP_ALLOC:
9313 case GOMP_MAP_RELEASE:
9314 case GOMP_MAP_DELETE:
9315 case GOMP_MAP_FORCE_ALLOC:
9316 case GOMP_MAP_IF_PRESENT:
9317 if (node == grp->grp_end)
9318 return node;
9320 node = OMP_CLAUSE_CHAIN (node);
9321 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9323 if (node == grp->grp_end)
9324 return *grp->grp_start;
9325 node = OMP_CLAUSE_CHAIN (node);
9327 if (node)
9328 switch (OMP_CLAUSE_MAP_KIND (node))
9330 case GOMP_MAP_POINTER:
9331 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9332 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9333 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9334 *firstprivate = OMP_CLAUSE_DECL (node);
9335 return *grp->grp_start;
9337 case GOMP_MAP_ALWAYS_POINTER:
9338 case GOMP_MAP_ATTACH_DETACH:
9339 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9340 return *grp->grp_start;
9342 default:
9343 internal_error ("unexpected mapping node");
9345 else
9346 internal_error ("unexpected mapping node");
9347 return error_mark_node;
9349 case GOMP_MAP_TO_PSET:
9350 gcc_assert (node != grp->grp_end);
9351 node = OMP_CLAUSE_CHAIN (node);
9352 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9353 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9354 return NULL_TREE;
9355 else
9356 internal_error ("unexpected mapping node");
9357 return error_mark_node;
9359 case GOMP_MAP_ATTACH:
9360 case GOMP_MAP_DETACH:
9361 node = OMP_CLAUSE_CHAIN (node);
9362 if (!node || *grp->grp_start == grp->grp_end)
9363 return NULL_TREE;
9364 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9365 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9367 /* We're mapping the base pointer itself in a bare attach or detach
9368 node. This is a side effect of how parsing works, and the mapping
9369 will be removed anyway (at least for enter/exit data directives).
9370 We should ignore the mapping here. FIXME. */
9371 return NULL_TREE;
9373 else
9374 internal_error ("unexpected mapping node");
9375 return error_mark_node;
9377 case GOMP_MAP_STRUCT:
9379 unsigned HOST_WIDE_INT num_mappings
9380 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9381 node = OMP_CLAUSE_CHAIN (node);
9382 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9383 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9385 *firstprivate = OMP_CLAUSE_DECL (node);
9386 node = OMP_CLAUSE_CHAIN (node);
9388 *chained = num_mappings;
9389 return node;
9392 case GOMP_MAP_FORCE_DEVICEPTR:
9393 case GOMP_MAP_DEVICE_RESIDENT:
9394 case GOMP_MAP_LINK:
9395 case GOMP_MAP_FIRSTPRIVATE:
9396 case GOMP_MAP_FIRSTPRIVATE_INT:
9397 case GOMP_MAP_USE_DEVICE_PTR:
9398 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9399 return NULL_TREE;
9401 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9402 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9403 case GOMP_MAP_POINTER:
9404 case GOMP_MAP_ALWAYS_POINTER:
9405 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9406 /* These shouldn't appear by themselves. */
9407 if (!seen_error ())
9408 internal_error ("unexpected pointer mapping node");
9409 return error_mark_node;
9411 default:
9412 gcc_unreachable ();
9415 return error_mark_node;
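/* For instance (illustrative), for the group

     GOMP_MAP_TO (*p)
     GOMP_MAP_FIRSTPRIVATE_POINTER (p)

   omp_group_base returns the GOMP_MAP_TO node and sets *FIRSTPRIVATE to
   "p"; for a GOMP_MAP_STRUCT group it instead returns the first member
   mapping and sets *CHAINED to the number of member mappings. */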
9418 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9419 nodes by base decl or address expression, via tree_operand_hash_no_se. */
9421 static void
9422 omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9423 omp_mapping_group *> *grpmap,
9424 vec<omp_mapping_group> *groups,
9425 tree reindex_sentinel)
9427 omp_mapping_group *grp;
9428 unsigned int i;
9429 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9431 FOR_EACH_VEC_ELT (*groups, i, grp)
9433 if (reindexing && *grp->grp_start == reindex_sentinel)
9434 above_hwm = true;
9436 if (reindexing && !above_hwm)
9437 continue;
9439 tree fpp;
9440 unsigned int chained;
9441 tree node = omp_group_base (grp, &chained, &fpp);
9443 if (node == error_mark_node || (!node && !fpp))
9444 continue;
9446 for (unsigned j = 0;
9447 node && j < chained;
9448 node = OMP_CLAUSE_CHAIN (node), j++)
9450 tree decl = OMP_CLAUSE_DECL (node);
9451 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9452 meaning node-hash lookups don't work. This is a workaround for
9453 that, but ideally we should just create the INDIRECT_REF at
9454 source instead. FIXME. */
9455 if (TREE_CODE (decl) == MEM_REF
9456 && integer_zerop (TREE_OPERAND (decl, 1)))
9457 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9459 omp_mapping_group **prev = grpmap->get (decl);
9461 if (prev && *prev == grp)
9462 /* Empty. */;
9463 else if (prev)
9465 /* Mapping the same thing twice is normally diagnosed as an error,
9466 but can happen under some circumstances, e.g. in pr99928-16.c,
9467 the directive:
9469 #pragma omp target simd reduction(+:a[:3]) \
9470 map(always, tofrom: a[:6])
9473 will result in two "a[0]" mappings (of different sizes). */
9475 grp->sibling = (*prev)->sibling;
9476 (*prev)->sibling = grp;
9478 else
9479 grpmap->put (decl, grp);
9482 if (!fpp)
9483 continue;
9485 omp_mapping_group **prev = grpmap->get (fpp);
9486 if (prev && *prev != grp)
9488 grp->sibling = (*prev)->sibling;
9489 (*prev)->sibling = grp;
9491 else
9492 grpmap->put (fpp, grp);
9496 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9497 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9499 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9500 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9502 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9504 return grpmap;
9507 /* Rebuild group map from partially-processed clause list (during
9508 omp_build_struct_sibling_lists). We have already processed nodes up until
9509 a high-water mark (HWM). This is a bit tricky because the list is being
9510 reordered as it is scanned, but we know:
9512 1. The list after HWM has not been touched yet, so we can reindex it safely.
9514 2. The list before and including HWM has been altered, but remains
9515 well-formed throughout the sibling-list building operation.
9517 So we can do the reindex operation in two parts: first on the processed
9518 half of the list, then on the unprocessed half. */
9520 static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9521 omp_reindex_mapping_groups (tree *list_p,
9522 vec<omp_mapping_group> *groups,
9523 vec<omp_mapping_group> *processed_groups,
9524 tree sentinel)
9526 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9527 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9529 processed_groups->truncate (0);
9531 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9532 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9533 if (sentinel)
9534 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9536 return grpmap;
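/* Schematically (an illustration of the two-part reindex):

     clause list: [ processed ... ] SENTINEL [ untouched ... ]
                    regathered into           indexed from GROUPS,
                    PROCESSED_GROUPS          starting at SENTINEL
*/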
9539 /* Find the immediately-containing struct for a component ref (etc.)
9540 expression EXPR. */
9542 static tree
9543 omp_containing_struct (tree expr)
9545 tree expr0 = expr;
9547 STRIP_NOPS (expr);
9549 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9550 component ref. */
9551 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9552 return expr0;
9554 while (TREE_CODE (expr) == ARRAY_REF)
9555 expr = TREE_OPERAND (expr, 0);
9557 if (TREE_CODE (expr) == COMPONENT_REF)
9558 expr = TREE_OPERAND (expr, 0);
9560 return expr;
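/* E.g. (illustrative): omp_containing_struct maps "s.arr[3]" or "s.field"
   to "s", but returns a bare "x" (no enclosing component access)
   unchanged. */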
9563 /* Return TRUE if DECL describes a component that is part of a whole structure
9564 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9565 that maps that structure, if present. */
9567 static bool
9568 omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9569 omp_mapping_group *> *grpmap,
9570 tree decl,
9571 omp_mapping_group **mapped_by_group)
9573 tree wsdecl = NULL_TREE;
9575 *mapped_by_group = NULL;
9577 while (true)
9579 wsdecl = omp_containing_struct (decl);
9580 if (wsdecl == decl)
9581 break;
9582 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9583 if (!wholestruct
9584 && TREE_CODE (wsdecl) == MEM_REF
9585 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9587 tree deref = TREE_OPERAND (wsdecl, 0);
9588 deref = build_fold_indirect_ref (deref);
9589 wholestruct = grpmap->get (deref);
9591 if (wholestruct)
9593 *mapped_by_group = *wholestruct;
9594 return true;
9596 decl = wsdecl;
9599 return false;
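/* For example (a hypothetical case): if GRPMAP holds a group mapping the
   whole struct "s" and DECL is "s.a.b", the loop above walks outwards
   through "s.a" to "s", finds the whole-struct group and returns true,
   with *MAPPED_BY_GROUP pointing at it. */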
9602 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9603 FALSE on error. */
9605 static bool
9606 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9607 vec<omp_mapping_group> *groups,
9608 hash_map<tree_operand_hash_no_se,
9609 omp_mapping_group *> *grpmap,
9610 omp_mapping_group *grp)
9612 if (grp->mark == PERMANENT)
9613 return true;
9614 if (grp->mark == TEMPORARY)
9616 fprintf (stderr, "when processing group:\n");
9617 debug_mapping_group (grp);
9618 internal_error ("base pointer cycle detected");
9619 return false;
9621 grp->mark = TEMPORARY;
9623 tree attaches_to = omp_get_attachment (grp);
9625 if (attaches_to)
9627 omp_mapping_group **basep = grpmap->get (attaches_to);
9629 if (basep && *basep != grp)
9631 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9632 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9633 return false;
9637 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9639 while (decl)
9641 tree base = omp_get_base_pointer (decl);
9643 if (!base)
9644 break;
9646 omp_mapping_group **innerp = grpmap->get (base);
9647 omp_mapping_group *wholestruct;
9649 /* We should treat whole-structure mappings as if all (pointer, in this
9650 case) members are mapped as individual list items. Check if we have
9651 such a whole-structure mapping, if we don't have an explicit reference
9652 to the pointer member itself. */
9653 if (!innerp
9654 && TREE_CODE (base) == COMPONENT_REF
9655 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9656 innerp = &wholestruct;
9658 if (innerp && *innerp != grp)
9660 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9661 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9662 return false;
9663 break;
9666 decl = base;
9669 grp->mark = PERMANENT;
9671 /* Emit grp to output list. */
9673 **outlist = grp;
9674 *outlist = &grp->next;
9676 return true;
9679 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9680 before mappings that use those pointers. This is an implementation of the
9681 depth-first search algorithm, described e.g. at:
9683 https://en.wikipedia.org/wiki/Topological_sorting
9686 static omp_mapping_group *
9687 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9688 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
9689 *grpmap)
9691 omp_mapping_group *grp, *outlist = NULL, **cursor;
9692 unsigned int i;
9694 cursor = &outlist;
9696 FOR_EACH_VEC_ELT (*groups, i, grp)
9698 if (grp->mark != PERMANENT)
9699 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9700 return NULL;
9703 return outlist;
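/* So, illustratively, for

     #pragma omp target enter data map(to: p[0:n]) map(to: p)

   the group mapping the base pointer "p" is emitted to the output list
   before the group for "p[0:n]" that attaches to it, whatever the
   original clause order was. */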
9706 /* Split INLIST into two parts, moving groups corresponding to
9707 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9708 The former list is then appended to the latter. Each sub-list retains the
9709 order of the original list.
9710 Note that ATTACH nodes are later moved to the end of the list in
9711 gimplify_adjust_omp_clauses, for target regions. */
9713 static omp_mapping_group *
9714 omp_segregate_mapping_groups (omp_mapping_group *inlist)
9716 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
9717 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
9719 for (omp_mapping_group *w = inlist; w;)
9721 tree c = *w->grp_start;
9722 omp_mapping_group *next = w->next;
9724 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9726 switch (OMP_CLAUSE_MAP_KIND (c))
9728 case GOMP_MAP_ALLOC:
9729 case GOMP_MAP_RELEASE:
9730 case GOMP_MAP_DELETE:
9731 *ard_tail = w;
9732 w->next = NULL;
9733 ard_tail = &w->next;
9734 break;
9736 default:
9737 *tf_tail = w;
9738 w->next = NULL;
9739 tf_tail = &w->next;
9742 w = next;
9745 /* Now splice the lists together... */
9746 *tf_tail = ard_groups;
9748 return tf_groups;
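/* E.g. (an illustration): an input list ordered {to, alloc, from, release}
   comes back as {to, from, alloc, release} -- each sublist preserving its
   original relative order. */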
9751 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9752 those groups based on the output list of omp_tsort_mapping_groups --
9753 singly-linked, threaded through each element's NEXT pointer starting at
9754 HEAD. Each list element appears exactly once in that linked list.
9756 Each element of GROUPS may correspond to one or several mapping nodes.
9757 Node groups are kept together, and the reordered list reuses the list
9758 positions occupied by the original groups.
9759 Hence if we have e.g.
9761 {to ptr ptr} firstprivate {tofrom ptr} ...
9762 ^ ^ ^
9763 first group non-"map" second group
9765 and say the second group contains a base pointer for the first so must be
9766 moved before it, the resulting list will contain:
9768 {tofrom ptr} firstprivate {to ptr ptr} ...
9769 ^ prev. second group ^ prev. first group
9772 static tree *
9773 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
9774 omp_mapping_group *head,
9775 tree *list_p)
9777 omp_mapping_group *grp;
9778 unsigned int i;
9779 unsigned numgroups = groups->length ();
9780 auto_vec<tree> old_heads (numgroups);
9781 auto_vec<tree *> old_headps (numgroups);
9782 auto_vec<tree> new_heads (numgroups);
9783 auto_vec<tree> old_succs (numgroups);
9784 bool map_at_start = (list_p == (*groups)[0].grp_start);
9786 tree *new_grp_tail = NULL;
9788 /* Stash the start & end nodes of each mapping group before we start
9789 modifying the list. */
9790 FOR_EACH_VEC_ELT (*groups, i, grp)
9792 old_headps.quick_push (grp->grp_start);
9793 old_heads.quick_push (*grp->grp_start);
9794 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
9797 /* And similarly, the heads of the groups in the order we want to rearrange
9798 the list to. */
9799 for (omp_mapping_group *w = head; w; w = w->next)
9800 new_heads.quick_push (*w->grp_start);
9802 FOR_EACH_VEC_ELT (*groups, i, grp)
9804 gcc_assert (head);
9806 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
9808 /* a {b c d} {e f g} h i j (original)
9810 a {k l m} {e f g} h i j (inserted new group on last iter)
9812 a {k l m} {n o p} h i j (this time, chain last group to new one)
9813 ^new_grp_tail
9815 *new_grp_tail = new_heads[i];
9817 else if (new_grp_tail)
9819 /* a {b c d} e {f g h} i j k (original)
9821 a {l m n} e {f g h} i j k (gap after last iter's group)
9823 a {l m n} e {o p q} h i j (chain last group to old successor)
9824 ^new_grp_tail
9826 *new_grp_tail = old_succs[i - 1];
9827 *old_headps[i] = new_heads[i];
9829 else
9831 /* The first inserted group -- point to new group, and leave end
9832 open.
9833 a {b c d} e f
9835 a {g h i...
9837 *grp->grp_start = new_heads[i];
9840 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
9842 head = head->next;
9845 if (new_grp_tail)
9846 *new_grp_tail = old_succs[numgroups - 1];
9848 gcc_assert (!head);
9850 return map_at_start ? (*groups)[0].grp_start : list_p;
9853 /* DECL is supposed to have lastprivate semantics in the outer contexts
9854 of combined/composite constructs, starting with OCTX.
9855 Add needed lastprivate, shared or map clause if no data sharing or
9856 mapping clause are present. IMPLICIT_P is true if it is an implicit
9857 clause (IV on simd), in which case the lastprivate will not be
9858 copied to some constructs. */
9860 static void
9861 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9862 tree decl, bool implicit_p)
9864 struct gimplify_omp_ctx *orig_octx = octx;
9865 for (; octx; octx = octx->outer_context)
9867 if ((octx->region_type == ORT_COMBINED_PARALLEL
9868 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9869 && splay_tree_lookup (octx->variables,
9870 (splay_tree_key) decl) == NULL)
9872 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9873 continue;
9875 if ((octx->region_type & ORT_TASK) != 0
9876 && octx->combined_loop
9877 && splay_tree_lookup (octx->variables,
9878 (splay_tree_key) decl) == NULL)
9880 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9881 continue;
9883 if (implicit_p
9884 && octx->region_type == ORT_WORKSHARE
9885 && octx->combined_loop
9886 && splay_tree_lookup (octx->variables,
9887 (splay_tree_key) decl) == NULL
9888 && octx->outer_context
9889 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9890 && splay_tree_lookup (octx->outer_context->variables,
9891 (splay_tree_key) decl) == NULL)
9893 octx = octx->outer_context;
9894 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9895 continue;
9897 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9898 && octx->combined_loop
9899 && splay_tree_lookup (octx->variables,
9900 (splay_tree_key) decl) == NULL
9901 && !omp_check_private (octx, decl, false))
9903 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9904 continue;
9906 if (octx->region_type == ORT_COMBINED_TARGET)
9908 splay_tree_node n = splay_tree_lookup (octx->variables,
9909 (splay_tree_key) decl);
9910 if (n == NULL)
9912 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9913 octx = octx->outer_context;
9915 else if (!implicit_p
9916 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9918 n->value &= ~(GOVD_FIRSTPRIVATE
9919 | GOVD_FIRSTPRIVATE_IMPLICIT
9920 | GOVD_EXPLICIT);
9921 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9922 octx = octx->outer_context;
9925 break;
9927 if (octx && (implicit_p || octx != orig_octx))
9928 omp_notice_variable (octx, decl, true);
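/* Roughly (a sketch, eliding the exact context-walking details): for

     #pragma omp target teams distribute parallel for lastprivate(x)

   the walk above would register "x" as shared (GOVD_SHARED) on the
   combined parallel and teams contexts and as mapped (GOVD_MAP) on the
   combined target, so the lastprivate value can reach the host. */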
9931 /* If we have mappings INNER and OUTER, where INNER is a component access and
9932 OUTER is a mapping of the whole containing struct, check that the mappings
9933 are compatible. We'll be deleting the inner mapping, so we need to make
9934 sure the outer mapping does (at least) the same transfers to/from the device
9935 as the inner mapping. */
9937 bool
9938 omp_check_mapping_compatibility (location_t loc,
9939 omp_mapping_group *outer,
9940 omp_mapping_group *inner)
9942 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
9944 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
9945 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
9947 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
9948 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
9950 if (outer_kind == inner_kind)
9951 return true;
9953 switch (outer_kind)
9955 case GOMP_MAP_ALWAYS_TO:
9956 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9957 || inner_kind == GOMP_MAP_ALLOC
9958 || inner_kind == GOMP_MAP_TO)
9959 return true;
9960 break;
9962 case GOMP_MAP_ALWAYS_FROM:
9963 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9964 || inner_kind == GOMP_MAP_ALLOC
9965 || inner_kind == GOMP_MAP_FROM)
9966 return true;
9967 break;
9969 case GOMP_MAP_TO:
9970 case GOMP_MAP_FROM:
9971 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9972 || inner_kind == GOMP_MAP_ALLOC)
9973 return true;
9974 break;
9976 case GOMP_MAP_ALWAYS_TOFROM:
9977 case GOMP_MAP_TOFROM:
9978 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9979 || inner_kind == GOMP_MAP_ALLOC
9980 || inner_kind == GOMP_MAP_TO
9981 || inner_kind == GOMP_MAP_FROM
9982 || inner_kind == GOMP_MAP_TOFROM)
9983 return true;
9984 break;
9986 default:
9990 error_at (loc, "data movement for component %qE is not compatible with "
9991 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
9992 OMP_CLAUSE_DECL (first_outer));
9994 return false;
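/* Thus, illustratively, an inner "map(to: s.a)" is compatible with an
   outer whole-struct "map(tofrom: s)", since TOFROM subsumes TO; the
   reverse -- an inner "map(tofrom: s.a)" under an outer "map(to: s)" --
   is diagnosed, because deleting the inner mapping would lose the
   copy-back from the device. */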
9997 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
9998 clause dependencies we handle for now are struct element mappings and
9999 whole-struct mappings on the same directive, and duplicate clause
10000 detection. */
10002 void
10003 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10004 hash_map<tree_operand_hash_no_se,
10005 omp_mapping_group *> *grpmap)
10007 int i;
10008 omp_mapping_group *grp;
10009 hash_set<tree_operand_hash> *seen_components = NULL;
10010 hash_set<tree_operand_hash> *shown_error = NULL;
10012 FOR_EACH_VEC_ELT (*groups, i, grp)
10014 tree grp_end = grp->grp_end;
10015 tree decl = OMP_CLAUSE_DECL (grp_end);
10017 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10019 if (DECL_P (grp_end))
10020 continue;
10022 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10023 while (TREE_CODE (c) == ARRAY_REF)
10024 c = TREE_OPERAND (c, 0);
10025 if (TREE_CODE (c) != COMPONENT_REF)
10026 continue;
10027 if (!seen_components)
10028 seen_components = new hash_set<tree_operand_hash> ();
10029 if (!shown_error)
10030 shown_error = new hash_set<tree_operand_hash> ();
10031 if (seen_components->contains (c)
10032 && !shown_error->contains (c))
10034 error_at (OMP_CLAUSE_LOCATION (grp_end),
10035 "%qE appears more than once in map clauses",
10036 OMP_CLAUSE_DECL (grp_end));
10037 shown_error->add (c);
10039 else
10040 seen_components->add (c);
10042 omp_mapping_group *struct_group;
10043 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10044 && *grp->grp_start == grp_end)
10046 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10047 struct_group, grp);
10048 /* Remove the whole of this mapping -- redundant. */
10049 grp->deleted = true;
10053 if (seen_components)
10054 delete seen_components;
10055 if (shown_error)
10056 delete shown_error;
10059 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10060 is linked to the previous node pointed to by INSERT_AT. */
10062 static tree *
10063 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10065 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10066 *insert_at = newnode;
10067 return &OMP_CLAUSE_CHAIN (newnode);
10070 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10071 pointed to by chain MOVE_AFTER instead. */
10073 static void
10074 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10076 gcc_assert (node == *old_pos);
10077 *old_pos = OMP_CLAUSE_CHAIN (node);
10078 OMP_CLAUSE_CHAIN (node) = *move_after;
10079 *move_after = node;
10082 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
10083 through LAST_NODE to after the MOVE_AFTER chain. Similar to the function
10084 below, but no new nodes are prepended to the list before splicing it into
10085 the new position. Return the position we should continue scanning the
10086 list at, or NULL to stay where we were. */
10088 static tree *
10089 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10090 tree *move_after)
10092 if (first_ptr == move_after)
10093 return NULL;
10095 tree tmp = *first_ptr;
10096 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10097 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10098 *move_after = tmp;
10100 return first_ptr;
10103 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10104 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10105 pointer MOVE_AFTER.
10107 The latter list was previously part of the OMP clause list, and the former
10108 (prepended) part is comprised of new nodes.
10110 We start with a list of nodes starting with a struct mapping node. We
10111 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10112 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10113 the group of mapping nodes we are currently processing (from the chain
10114 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10115 we should continue processing from, or NULL to stay where we were.
10117 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10118 different) is worked through below. Here we are processing LAST_NODE, and
10119 FIRST_PTR points at the preceding mapping clause:
10121 #. mapping node chain
10122 ---------------------------------------------------
10123 A. struct_node [->B]
10124 B. comp_1 [->C]
10125 C. comp_2 [->D (move_after)]
10126 D. map_to_3 [->E]
10127 E. attach_3 [->F (first_ptr)]
10128 F. map_to_4 [->G (continue_at)]
10129 G. attach_4 (last_node) [->H]
10130 H. ...
10132 *last_new_tail = *first_ptr;
10134 I. new_node (first_new) [->F (last_new_tail)]
10136 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10138 #. mapping node chain
10139 ----------------------------------------------------
10140 A. struct_node [->B]
10141 B. comp_1 [->C]
10142 C. comp_2 [->D (move_after)]
10143 D. map_to_3 [->E]
10144 E. attach_3 [->H (first_ptr)]
10145 F. map_to_4 [->G (continue_at)]
10146 G. attach_4 (last_node) [->H]
10147 H. ...
10149 I. new_node (first_new) [->F (last_new_tail)]
10151 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10153 #. mapping node chain
10154 ---------------------------------------------------
10155 A. struct_node [->B]
10156 B. comp_1 [->C]
10157 C. comp_2 [->D (move_after)]
10158 D. map_to_3 [->E]
10159 E. attach_3 [->H (continue_at)]
10160 F. map_to_4 [->G]
10161 G. attach_4 (last_node) [->D]
10162 H. ...
10164 I. new_node (first_new) [->F (last_new_tail)]
10166 *move_after = first_new;
10168 #. mapping node chain
10169 ---------------------------------------------------
10170 A. struct_node [->B]
10171 B. comp_1 [->C]
10172 C. comp_2 [->I (move_after)]
10173 D. map_to_3 [->E]
10174 E. attach_3 [->H (continue_at)]
10175 F. map_to_4 [->G]
10176 G. attach_4 (last_node) [->D]
10177 H. ...
10178 I. new_node (first_new) [->F (last_new_tail)]
10180 or, in order:
10182 #. mapping node chain
10183 ---------------------------------------------------
10184 A. struct_node [->B]
10185 B. comp_1 [->C]
10186 C. comp_2 [->I (move_after)]
10187 I. new_node (first_new) [->F (last_new_tail)]
10188 F. map_to_4 [->G]
10189 G. attach_4 (last_node) [->D]
10190 D. map_to_3 [->E]
10191 E. attach_3 [->H (continue_at)]
10192 H. ...
10195 static tree *
10196 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10197 tree *first_ptr, tree last_node,
10198 tree *move_after)
10200 tree *continue_at = NULL;
10201 *last_new_tail = *first_ptr;
10202 if (first_ptr == move_after)
10203 *move_after = first_new;
10204 else
10206 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10207 continue_at = first_ptr;
10208 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10209 *move_after = first_new;
10211 return continue_at;
10214 /* Mapping struct members causes an additional set of nodes to be created,
10215 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10216 number of members being mapped, in order of ascending position (address or
10217 bitwise).
10219 We scan through the list of mapping clauses, calling this function for each
10220 struct member mapping we find, and build up the list of mappings after the
10221 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10222 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10223 moved into place in the sorted list.
10225 struct {
10226 int *a;
10227 int *b;
10228 int c;
10229 int *d;
10232 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10233 struct.d[0:n])
10235 GOMP_MAP_STRUCT (4)
10236 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10237 GOMP_MAP_ALLOC (struct.a)
10238 GOMP_MAP_ALLOC (struct.b)
10239 GOMP_MAP_TO (struct.c)
10240 GOMP_MAP_ALLOC (struct.d)
10243 In the case where we are mapping references to pointers, or in Fortran if
10244 we are mapping an array with a descriptor, additional nodes may also be
10245 created after the struct node list.
10247 The return value is either a pointer to the next node to process (if the
10248 list has been rearranged) or NULL to continue with the next node in the
10249 original list. */
10251 static tree *
10252 omp_accumulate_sibling_list (enum omp_region_type region_type,
10253 enum tree_code code,
10254 hash_map<tree_operand_hash, tree>
10255 *&struct_map_to_clause, tree *grp_start_p,
10256 tree grp_end, tree *inner)
10258 poly_offset_int coffset;
10259 poly_int64 cbitpos;
10260 tree ocd = OMP_CLAUSE_DECL (grp_end);
10261 bool openmp = !(region_type & ORT_ACC);
10262 tree *continue_at = NULL;
10264 while (TREE_CODE (ocd) == ARRAY_REF)
10265 ocd = TREE_OPERAND (ocd, 0);
10267 if (TREE_CODE (ocd) == INDIRECT_REF)
10268 ocd = TREE_OPERAND (ocd, 0);
10270 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10272 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10273 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10274 == GOMP_MAP_ATTACH_DETACH)
10275 || (OMP_CLAUSE_MAP_KIND (grp_end)
10276 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10277 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10278 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10280 /* FIXME: If we're not mapping the base pointer in some other clause on this
10281 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10282 early-exit. */
10283 if (openmp && attach_detach)
10284 return NULL;
10286 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10288 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10289 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10291 OMP_CLAUSE_SET_MAP_KIND (l, k);
10293 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10295 OMP_CLAUSE_SIZE (l)
10296 = (!attach ? size_int (1)
10297 : (DECL_P (OMP_CLAUSE_DECL (l))
10298 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10299 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10300 if (struct_map_to_clause == NULL)
10301 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10302 struct_map_to_clause->put (base, l);
10304 if (ptr || attach_detach)
10306 tree extra_node;
10307 tree alloc_node
10308 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10309 &extra_node);
10310 OMP_CLAUSE_CHAIN (l) = alloc_node;
10312 tree *insert_node_pos = grp_start_p;
10314 if (extra_node)
10316 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10317 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10319 else
10320 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10322 *insert_node_pos = l;
10324 else
10326 gcc_assert (*grp_start_p == grp_end);
10327 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10330 tree noind = omp_strip_indirections (base);
10332 if (!openmp
10333 && (region_type & ORT_TARGET)
10334 && TREE_CODE (noind) == COMPONENT_REF)
10336 /* The base for this component access is a struct component access
10337 itself. Insert a node to be processed on the next iteration of
10338 our caller's loop, which will subsequently be turned into a new,
10339 inner GOMP_MAP_STRUCT mapping.
10341 We need to do this else the non-DECL_P base won't be
10342 rewritten correctly in the offloaded region. */
10343 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10344 OMP_CLAUSE_MAP);
10345 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10346 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10347 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10348 *inner = c2;
10349 return NULL;
10352 tree sdecl = omp_strip_components_and_deref (base);
10354 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10356 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10357 OMP_CLAUSE_MAP);
10358 bool base_ref
10359 = (TREE_CODE (base) == INDIRECT_REF
10360 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10361 == REFERENCE_TYPE)
10362 || ((TREE_CODE (TREE_OPERAND (base, 0))
10363 == INDIRECT_REF)
10364 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10365 (TREE_OPERAND (base, 0), 0)))
10366 == REFERENCE_TYPE))));
10367 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10368 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10369 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10370 OMP_CLAUSE_DECL (c2) = sdecl;
10371 tree baddr = build_fold_addr_expr (base);
10372 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10373 ptrdiff_type_node, baddr);
10374 /* This isn't going to be good enough when we add support for more
10375 complicated lvalue expressions. FIXME. */
10376 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10378 sdecl = build_simple_mem_ref (sdecl);
10379 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10380 ptrdiff_type_node, sdecl);
10381 OMP_CLAUSE_SIZE (c2)
10382 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10383 ptrdiff_type_node, baddr, decladdr);
10384 /* Insert after struct node. */
10385 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10386 OMP_CLAUSE_CHAIN (l) = c2;
10389 return NULL;
10391 else if (struct_map_to_clause)
10393 tree *osc = struct_map_to_clause->get (base);
10394 tree *sc = NULL, *scp = NULL;
10395 sc = &OMP_CLAUSE_CHAIN (*osc);
10396 /* The struct mapping might be immediately followed by a
10397 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10398 indirect access or a reference, or both. (This added node is removed
10399 in omp-low.c after it has been processed there.) */
10400 if (*sc != grp_end
10401 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10402 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10403 sc = &OMP_CLAUSE_CHAIN (*sc);
10404 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10405 if ((ptr || attach_detach) && sc == grp_start_p)
10406 break;
10407 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10408 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10409 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10410 break;
10411 else
10413 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10414 poly_offset_int offset;
10415 poly_int64 bitpos;
10417 if (TREE_CODE (sc_decl) == ARRAY_REF)
10419 while (TREE_CODE (sc_decl) == ARRAY_REF)
10420 sc_decl = TREE_OPERAND (sc_decl, 0);
10421 if (TREE_CODE (sc_decl) != COMPONENT_REF
10422 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10423 break;
10425 else if (TREE_CODE (sc_decl) == INDIRECT_REF
10426 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10427 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10428 == REFERENCE_TYPE))
10429 sc_decl = TREE_OPERAND (sc_decl, 0);
10431 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10432 if (!base2 || !operand_equal_p (base2, base, 0))
10433 break;
10434 if (scp)
10435 continue;
10436 if (maybe_lt (coffset, offset)
10437 || (known_eq (coffset, offset)
10438 && maybe_lt (cbitpos, bitpos)))
10440 if (ptr || attach_detach)
10441 scp = sc;
10442 else
10443 break;
10447 if (!attach)
10448 OMP_CLAUSE_SIZE (*osc)
10449 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10450 if (ptr || attach_detach)
10452 tree cl = NULL_TREE, extra_node;
10453 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10454 grp_end, &extra_node);
10455 tree *tail_chain = NULL;
10457 /* Here, we have:
10459 grp_end : the last (or only) node in this group.
10460 grp_start_p : pointer to the first node in a pointer mapping group
10461 up to and including GRP_END.
10462 sc : pointer to the chain for the end of the struct component
10463 list.
10464 scp : pointer to the chain for the sorted position at which we
10465 should insert in the middle of the struct component list
10466 (else NULL to insert at end).
10467 alloc_node : the "alloc" node for the structure (pointer-type)
10468 component. We insert at SCP (if present), else SC
10469 (the end of the struct component list).
10470 extra_node : a newly-synthesized node for an additional indirect
10471 pointer mapping or a Fortran pointer set, if needed.
10472 cl : first node to prepend before grp_start_p.
10473 tail_chain : pointer to chain of last prepended node.
10475 The general idea is we move the nodes for this struct mapping
10476 together: the alloc node goes into the sorted list directly after
10477 the struct mapping, and any extra nodes (together with the nodes
10478 mapping arrays pointed to by struct components) get moved after
10479 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10480 the end of the struct component mapping list. It's important that
10481 the alloc_node comes first in that case because it's part of the
10482 sorted component mapping list (but subsequent nodes are not!). */
10484 if (scp)
10485 omp_siblist_insert_node_after (alloc_node, scp);
10487 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10488 already inserted it) and the extra_node (if it is present). The
10489 list can be empty if we added alloc_node above and there is no
10490 extra node. */
10491 if (scp && extra_node)
10493 cl = extra_node;
10494 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10496 else if (extra_node)
10498 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10499 cl = alloc_node;
10500 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10502 else if (!scp)
10504 cl = alloc_node;
10505 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10508 continue_at
10509 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10510 grp_start_p, grp_end,
10511 sc)
10512 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10514 else if (*sc != grp_end)
10516 gcc_assert (*grp_start_p == grp_end);
10518 /* We are moving the current node back to a previous struct node:
10519 the node that used to point to the current node will now point to
10520 the next node. */
10521 continue_at = grp_start_p;
10522 /* In the non-pointer case, the mapping clause itself is moved into
10523 the correct position in the struct component list, which in this
10524 case is just SC. */
10525 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10528 return continue_at;
10531 /* Scan through GROUPS, and create sorted structure sibling lists without
10532 gimplifying. */
10534 static bool
10535 omp_build_struct_sibling_lists (enum tree_code code,
10536 enum omp_region_type region_type,
10537 vec<omp_mapping_group> *groups,
10538 hash_map<tree_operand_hash_no_se,
10539 omp_mapping_group *> **grpmap,
10540 tree *list_p)
10542 unsigned i;
10543 omp_mapping_group *grp;
10544 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10545 bool success = true;
10546 tree *new_next = NULL;
10547 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10548 auto_vec<omp_mapping_group> pre_hwm_groups;
10550 FOR_EACH_VEC_ELT (*groups, i, grp)
10552 tree c = grp->grp_end;
10553 tree decl = OMP_CLAUSE_DECL (c);
10554 tree grp_end = grp->grp_end;
10555 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10557 if (new_next)
10558 grp->grp_start = new_next;
10560 new_next = NULL;
10562 tree *grp_start_p = grp->grp_start;
10564 if (DECL_P (decl))
10565 continue;
10567 /* Skip groups we marked for deletion in
10568 oacc_resolve_clause_dependencies. */
10569 if (grp->deleted)
10570 continue;
10572 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10573 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10575 /* Don't process an array descriptor that isn't inside a derived type
10576 as a struct (the GOMP_MAP_POINTER following will have the form
10577 "var.data", but such mappings are handled specially). */
10578 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10579 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10580 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10581 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10582 continue;
10585 tree d = decl;
10586 if (TREE_CODE (d) == ARRAY_REF)
10588 while (TREE_CODE (d) == ARRAY_REF)
10589 d = TREE_OPERAND (d, 0);
10590 if (TREE_CODE (d) == COMPONENT_REF
10591 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10592 decl = d;
10594 if (d == decl
10595 && TREE_CODE (decl) == INDIRECT_REF
10596 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10597 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10598 == REFERENCE_TYPE)
10599 && (OMP_CLAUSE_MAP_KIND (c)
10600 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10601 decl = TREE_OPERAND (decl, 0);
10603 STRIP_NOPS (decl);
10605 if (TREE_CODE (decl) != COMPONENT_REF)
10606 continue;
10608 /* If we're mapping the whole struct in another node, skip adding this
10609 node to a sibling list. */
10610 omp_mapping_group *wholestruct;
10611 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10612 &wholestruct))
10614 if (!(region_type & ORT_ACC)
10615 && *grp_start_p == grp_end)
10616 /* Remove the whole of this mapping -- redundant. */
10617 grp->deleted = true;
10619 continue;
10622 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10623 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10624 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10625 && code != OACC_UPDATE
10626 && code != OMP_TARGET_UPDATE)
10628 if (error_operand_p (decl))
10630 success = false;
10631 goto error_out;
10634 tree stype = TREE_TYPE (decl);
10635 if (TREE_CODE (stype) == REFERENCE_TYPE)
10636 stype = TREE_TYPE (stype);
10637 if (TYPE_SIZE_UNIT (stype) == NULL
10638 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10640 error_at (OMP_CLAUSE_LOCATION (c),
10641 "mapping field %qE of variable length "
10642 "structure", OMP_CLAUSE_DECL (c));
10643 success = false;
10644 goto error_out;
10647 tree inner = NULL_TREE;
10649 new_next
10650 = omp_accumulate_sibling_list (region_type, code,
10651 struct_map_to_clause, grp_start_p,
10652 grp_end, &inner);
10654 if (inner)
10656 if (new_next && *new_next == NULL_TREE)
10657 *new_next = inner;
10658 else
10659 *tail = inner;
10661 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10662 omp_mapping_group newgrp;
10663 newgrp.grp_start = new_next ? new_next : tail;
10664 newgrp.grp_end = inner;
10665 newgrp.mark = UNVISITED;
10666 newgrp.sibling = NULL;
10667 newgrp.deleted = false;
10668 newgrp.next = NULL;
10669 groups->safe_push (newgrp);
10671 /* !!! Growing GROUPS might invalidate the pointers in the group
10672 map. Rebuild it here. This is a bit inefficient, but
10673 shouldn't happen very often. */
10674 delete (*grpmap);
10675 *grpmap
10676 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10677 sentinel);
10679 tail = &OMP_CLAUSE_CHAIN (inner);
10684 /* Delete groups marked for deletion above. At this point the order of the
10685 groups may no longer correspond to the order of the underlying list,
10686 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10687 deleted nodes... */
10689 FOR_EACH_VEC_ELT (*groups, i, grp)
10690 if (grp->deleted)
10691 for (tree d = *grp->grp_start;
10692 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10693 d = OMP_CLAUSE_CHAIN (d))
10694 OMP_CLAUSE_DECL (d) = NULL_TREE;
10696 /* ...then sweep through the list removing the now-empty nodes. */
10698 tail = list_p;
10699 while (*tail)
10701 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10702 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
10703 *tail = OMP_CLAUSE_CHAIN (*tail);
10704 else
10705 tail = &OMP_CLAUSE_CHAIN (*tail);
10708 error_out:
10709 if (struct_map_to_clause)
10710 delete struct_map_to_clause;
10712 return success;
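/* E.g. (a sketch): for "map(tofrom: s.a, s.b)" the loop above funnels both
   component mappings through omp_accumulate_sibling_list, leaving a single
   GOMP_MAP_STRUCT (2) node for "s" followed by the two member mappings in
   ascending offset order. */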
10715 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10716 omp context and into previous omp contexts as needed. */
10718 static void
10719 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10720 enum omp_region_type region_type,
10721 enum tree_code code)
10723 struct gimplify_omp_ctx *ctx, *outer_ctx;
10724 tree c;
10725 tree *prev_list_p = NULL, *orig_list_p = list_p;
10726 int handled_depend_iterators = -1;
10727 int nowait = -1;
10729 ctx = new_omp_context (region_type);
10730 ctx->code = code;
10731 outer_ctx = ctx->outer_context;
10732 if (code == OMP_TARGET)
10734 if (!lang_GNU_Fortran ())
10735 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10736 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10737 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10738 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10740 if (!lang_GNU_Fortran ())
10741 switch (code)
10743 case OMP_TARGET:
10744 case OMP_TARGET_DATA:
10745 case OMP_TARGET_ENTER_DATA:
10746 case OMP_TARGET_EXIT_DATA:
10747 case OACC_DECLARE:
10748 case OACC_HOST_DATA:
10749 case OACC_PARALLEL:
10750 case OACC_KERNELS:
10751 ctx->target_firstprivatize_array_bases = true;
10752 default:
10753 break;
10756 if (code == OMP_TARGET
10757 || code == OMP_TARGET_DATA
10758 || code == OMP_TARGET_ENTER_DATA
10759 || code == OMP_TARGET_EXIT_DATA)
10761 vec<omp_mapping_group> *groups;
10762 groups = omp_gather_mapping_groups (list_p);
10763 if (groups)
10765 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10766 grpmap = omp_index_mapping_groups (groups);
10768 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10769 list_p);
10771 omp_mapping_group *outlist = NULL;
10773 /* Topological sorting may fail if we have duplicate nodes, which
10774 we should have detected and shown an error for already. Skip
10775 sorting in that case. */
10776 if (seen_error ())
10777 goto failure;
10779 delete grpmap;
10780 delete groups;
10782 /* Rebuild now we have struct sibling lists. */
10783 groups = omp_gather_mapping_groups (list_p);
10784 grpmap = omp_index_mapping_groups (groups);
10786 outlist = omp_tsort_mapping_groups (groups, grpmap);
10787 outlist = omp_segregate_mapping_groups (outlist);
10788 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10790 failure:
10791 delete grpmap;
10792 delete groups;
10795 else if (region_type & ORT_ACC)
10797 vec<omp_mapping_group> *groups;
10798 groups = omp_gather_mapping_groups (list_p);
10799 if (groups)
10801 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
10802 grpmap = omp_index_mapping_groups (groups);
10804 oacc_resolve_clause_dependencies (groups, grpmap);
10805 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10806 list_p);
10808 delete groups;
10809 delete grpmap;
10813 while ((c = *list_p) != NULL)
10815 bool remove = false;
10816 bool notice_outer = true;
10817 const char *check_non_private = NULL;
10818 unsigned int flags;
10819 tree decl;
10821 switch (OMP_CLAUSE_CODE (c))
10823 case OMP_CLAUSE_PRIVATE:
10824 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10825 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10827 flags |= GOVD_PRIVATE_OUTER_REF;
10828 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
10830 else
10831 notice_outer = false;
10832 goto do_add;
10833 case OMP_CLAUSE_SHARED:
10834 flags = GOVD_SHARED | GOVD_EXPLICIT;
10835 goto do_add;
10836 case OMP_CLAUSE_FIRSTPRIVATE:
10837 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10838 check_non_private = "firstprivate";
10839 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10841 gcc_assert (code == OMP_TARGET);
10842 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
10844 goto do_add;
10845 case OMP_CLAUSE_LASTPRIVATE:
10846 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10847 switch (code)
10849 case OMP_DISTRIBUTE:
10850 error_at (OMP_CLAUSE_LOCATION (c),
10851 "conditional %<lastprivate%> clause on "
10852 "%qs construct", "distribute");
10853 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10854 break;
10855 case OMP_TASKLOOP:
10856 error_at (OMP_CLAUSE_LOCATION (c),
10857 "conditional %<lastprivate%> clause on "
10858 "%qs construct", "taskloop");
10859 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10860 break;
10861 default:
10862 break;
10864 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
10865 if (code != OMP_LOOP)
10866 check_non_private = "lastprivate";
10867 decl = OMP_CLAUSE_DECL (c);
10868 if (error_operand_p (decl))
10869 goto do_add;
10870 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
10871 && !lang_hooks.decls.omp_scalar_p (decl, true))
10873 error_at (OMP_CLAUSE_LOCATION (c),
10874 "non-scalar variable %qD in conditional "
10875 "%<lastprivate%> clause", decl);
10876 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10878 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10879 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
10880 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
10881 false);
10882 goto do_add;
10883 case OMP_CLAUSE_REDUCTION:
10884 if (OMP_CLAUSE_REDUCTION_TASK (c))
10886 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10888 if (nowait == -1)
10889 nowait = omp_find_clause (*list_p,
10890 OMP_CLAUSE_NOWAIT) != NULL_TREE;
10891 if (nowait
10892 && (outer_ctx == NULL
10893 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
10895 error_at (OMP_CLAUSE_LOCATION (c),
10896 "%<task%> reduction modifier on a construct "
10897 "with a %<nowait%> clause");
10898 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10901 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
10903 error_at (OMP_CLAUSE_LOCATION (c),
10904 "invalid %<task%> reduction modifier on construct "
10905 "other than %<parallel%>, %qs, %<sections%> or "
10906 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10907 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10910 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10911 switch (code)
10913 case OMP_SECTIONS:
10914 error_at (OMP_CLAUSE_LOCATION (c),
10915 "%<inscan%> %<reduction%> clause on "
10916 "%qs construct", "sections");
10917 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10918 break;
10919 case OMP_PARALLEL:
10920 error_at (OMP_CLAUSE_LOCATION (c),
10921 "%<inscan%> %<reduction%> clause on "
10922 "%qs construct", "parallel");
10923 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10924 break;
10925 case OMP_TEAMS:
10926 error_at (OMP_CLAUSE_LOCATION (c),
10927 "%<inscan%> %<reduction%> clause on "
10928 "%qs construct", "teams");
10929 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10930 break;
10931 case OMP_TASKLOOP:
10932 error_at (OMP_CLAUSE_LOCATION (c),
10933 "%<inscan%> %<reduction%> clause on "
10934 "%qs construct", "taskloop");
10935 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10936 break;
10937 case OMP_SCOPE:
10938 error_at (OMP_CLAUSE_LOCATION (c),
10939 "%<inscan%> %<reduction%> clause on "
10940 "%qs construct", "scope");
10941 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10942 break;
10943 default:
10944 break;
10946 /* FALLTHRU */
10947 case OMP_CLAUSE_IN_REDUCTION:
10948 case OMP_CLAUSE_TASK_REDUCTION:
10949 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
10950 /* OpenACC permits reductions on private variables. */
10951 if (!(region_type & ORT_ACC)
10952 /* taskgroup is actually not a worksharing region. */
10953 && code != OMP_TASKGROUP)
10954 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
10955 decl = OMP_CLAUSE_DECL (c);
10956 if (TREE_CODE (decl) == MEM_REF)
10958 tree type = TREE_TYPE (decl);
10959 bool saved_into_ssa = gimplify_ctxp->into_ssa;
10960 gimplify_ctxp->into_ssa = false;
10961 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
10962 NULL, is_gimple_val, fb_rvalue, false)
10963 == GS_ERROR)
10965 gimplify_ctxp->into_ssa = saved_into_ssa;
10966 remove = true;
10967 break;
10969 gimplify_ctxp->into_ssa = saved_into_ssa;
10970 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
10971 if (DECL_P (v))
10973 omp_firstprivatize_variable (ctx, v);
10974 omp_notice_variable (ctx, v, true);
10976 decl = TREE_OPERAND (decl, 0);
10977 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10979 gimplify_ctxp->into_ssa = false;
10980 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
10981 NULL, is_gimple_val, fb_rvalue, false)
10982 == GS_ERROR)
10984 gimplify_ctxp->into_ssa = saved_into_ssa;
10985 remove = true;
10986 break;
10988 gimplify_ctxp->into_ssa = saved_into_ssa;
10989 v = TREE_OPERAND (decl, 1);
10990 if (DECL_P (v))
10992 omp_firstprivatize_variable (ctx, v);
10993 omp_notice_variable (ctx, v, true);
10995 decl = TREE_OPERAND (decl, 0);
10997 if (TREE_CODE (decl) == ADDR_EXPR
10998 || TREE_CODE (decl) == INDIRECT_REF)
10999 decl = TREE_OPERAND (decl, 0);
11001 goto do_add_decl;
11002 case OMP_CLAUSE_LINEAR:
11003 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
11004 is_gimple_val, fb_rvalue) == GS_ERROR)
11006 remove = true;
11007 break;
11009 else
11011 if (code == OMP_SIMD
11012 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11014 struct gimplify_omp_ctx *octx = outer_ctx;
11015 if (octx
11016 && octx->region_type == ORT_WORKSHARE
11017 && octx->combined_loop
11018 && !octx->distribute)
11020 if (octx->outer_context
11021 && (octx->outer_context->region_type
11022 == ORT_COMBINED_PARALLEL))
11023 octx = octx->outer_context->outer_context;
11024 else
11025 octx = octx->outer_context;
11027 if (octx
11028 && octx->region_type == ORT_WORKSHARE
11029 && octx->combined_loop
11030 && octx->distribute)
11032 error_at (OMP_CLAUSE_LOCATION (c),
11033 "%<linear%> clause for variable other than "
11034 "loop iterator specified on construct "
11035 "combined with %<distribute%>");
11036 remove = true;
11037 break;
11040 /* For combined #pragma omp parallel for simd, we need to put
11041 lastprivate and perhaps firstprivate too on the
11042 parallel. Similarly for #pragma omp for simd. */
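/* Illustrative only: given

     #pragma omp parallel for simd linear (x:1)

   the simd construct privatizes X, but its final value must remain
   visible after the loop, so the enclosing parallel gets a matching
   lastprivate (and, with copy-in, firstprivate) entry; the loop
   below walks the outer contexts to register that. */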
11043 struct gimplify_omp_ctx *octx = outer_ctx;
11044 bool taskloop_seen = false;
11045 decl = NULL_TREE;
11048 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11049 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11050 break;
11051 decl = OMP_CLAUSE_DECL (c);
11052 if (error_operand_p (decl))
11054 decl = NULL_TREE;
11055 break;
11057 flags = GOVD_SEEN;
11058 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11059 flags |= GOVD_FIRSTPRIVATE;
11060 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11061 flags |= GOVD_LASTPRIVATE;
11062 if (octx
11063 && octx->region_type == ORT_WORKSHARE
11064 && octx->combined_loop)
11066 if (octx->outer_context
11067 && (octx->outer_context->region_type
11068 == ORT_COMBINED_PARALLEL))
11069 octx = octx->outer_context;
11070 else if (omp_check_private (octx, decl, false))
11071 break;
11073 else if (octx
11074 && (octx->region_type & ORT_TASK) != 0
11075 && octx->combined_loop)
11076 taskloop_seen = true;
11077 else if (octx
11078 && octx->region_type == ORT_COMBINED_PARALLEL
11079 && ((ctx->region_type == ORT_WORKSHARE
11080 && octx == outer_ctx)
11081 || taskloop_seen))
11082 flags = GOVD_SEEN | GOVD_SHARED;
11083 else if (octx
11084 && ((octx->region_type & ORT_COMBINED_TEAMS)
11085 == ORT_COMBINED_TEAMS))
11086 flags = GOVD_SEEN | GOVD_SHARED;
11087 else if (octx
11088 && octx->region_type == ORT_COMBINED_TARGET)
11090 if (flags & GOVD_LASTPRIVATE)
11091 flags = GOVD_SEEN | GOVD_MAP;
11093 else
11094 break;
11095 splay_tree_node on
11096 = splay_tree_lookup (octx->variables,
11097 (splay_tree_key) decl);
11098 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11100 octx = NULL;
11101 break;
11103 omp_add_variable (octx, decl, flags);
11104 if (octx->outer_context == NULL)
11105 break;
11106 octx = octx->outer_context;
11108 while (1);
11109 if (octx
11110 && decl
11111 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11112 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11113 omp_notice_variable (octx, decl, true);
11115 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11116 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11117 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11119 notice_outer = false;
11120 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11122 goto do_add;
11124 case OMP_CLAUSE_MAP:
11125 decl = OMP_CLAUSE_DECL (c);
11126 if (error_operand_p (decl))
11127 remove = true;
11128 switch (code)
11130 case OMP_TARGET:
11131 break;
11132 case OACC_DATA:
11133 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11134 break;
11135 /* FALLTHRU */
11136 case OMP_TARGET_DATA:
11137 case OMP_TARGET_ENTER_DATA:
11138 case OMP_TARGET_EXIT_DATA:
11139 case OACC_ENTER_DATA:
11140 case OACC_EXIT_DATA:
11141 case OACC_HOST_DATA:
11142 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11143 || (OMP_CLAUSE_MAP_KIND (c)
11144 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11145 /* For target {,enter ,exit }data only the array slice is
11146 mapped, but not the pointer to it. */
11147 remove = true;
11148 break;
11149 default:
11150 break;
11152 /* For Fortran, not only the pointer to the data is mapped but also
11153 the address of the pointer, the array descriptor etc.; for
11154 'exit data' - and in particular for 'delete:' - having an 'alloc:'
11155 does not make sense. Likewise, for 'update' only transferring the
11156 data itself is needed as the rest has been handled in previous
11157 directives. However, for 'exit data', the array descriptor needs
11158 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
11160 NOTE: Generally, it is not safe to perform "enter data" operations
11161 on arrays where the data *or the descriptor* may go out of scope
11162 before a corresponding "exit data" operation -- and such a
11163 descriptor may be synthesized temporarily, e.g. to pass an
11164 explicit-shape array to a function expecting an assumed-shape
11165 argument. Performing "enter data" inside the called function
11166 would thus be problematic. */
11167 if (code == OMP_TARGET_EXIT_DATA
11168 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
11169 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
11170 == GOMP_MAP_DELETE
11171 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
11172 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
11173 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11174 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
11175 remove = true;
11177 if (remove)
11178 break;
11179 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11181 struct gimplify_omp_ctx *octx;
11182 for (octx = outer_ctx; octx; octx = octx->outer_context)
11184 if (octx->region_type != ORT_ACC_HOST_DATA)
11185 break;
11186 splay_tree_node n2
11187 = splay_tree_lookup (octx->variables,
11188 (splay_tree_key) decl);
11189 if (n2)
11190 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11191 "declared in enclosing %<host_data%> region",
11192 DECL_NAME (decl));
11195 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11196 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11197 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11198 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11199 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11201 remove = true;
11202 break;
11204 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11205 || (OMP_CLAUSE_MAP_KIND (c)
11206 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11207 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11208 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11210 OMP_CLAUSE_SIZE (c)
11211 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11212 false);
11213 if ((region_type & ORT_TARGET) != 0)
11214 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11215 GOVD_FIRSTPRIVATE | GOVD_SEEN);
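/* A GOMP_MAP_STRUCT node is synthesized when individual members are
   mapped, as in "map (to: s.a, s.b)" (illustrative only); the
   underlying struct decl itself is then the entity to record. */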
11218 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11220 tree base = omp_strip_components_and_deref (decl);
11221 if (DECL_P (base))
11223 decl = base;
11224 splay_tree_node n
11225 = splay_tree_lookup (ctx->variables,
11226 (splay_tree_key) decl);
11227 if (seen_error ()
11228 && n
11229 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11231 remove = true;
11232 break;
11234 flags = GOVD_MAP | GOVD_EXPLICIT;
11236 goto do_add_decl;
11240 if (TREE_CODE (decl) == TARGET_EXPR)
11242 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11243 is_gimple_lvalue, fb_lvalue)
11244 == GS_ERROR)
11245 remove = true;
11247 else if (!DECL_P (decl))
11249 tree d = decl, *pd;
11250 if (TREE_CODE (d) == ARRAY_REF)
11252 while (TREE_CODE (d) == ARRAY_REF)
11253 d = TREE_OPERAND (d, 0);
11254 if (TREE_CODE (d) == COMPONENT_REF
11255 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11256 decl = d;
11258 pd = &OMP_CLAUSE_DECL (c);
11259 if (d == decl
11260 && TREE_CODE (decl) == INDIRECT_REF
11261 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11262 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11263 == REFERENCE_TYPE)
11264 && (OMP_CLAUSE_MAP_KIND (c)
11265 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11267 pd = &TREE_OPERAND (decl, 0);
11268 decl = TREE_OPERAND (decl, 0);
11270 /* An "attach/detach" operation on an update directive should
11271 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11272 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11273 depends on the previous mapping. */
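/* Illustrative only: an update such as

     #pragma acc update device (s.p[0:n])

   may carry an "attach/detach" node for the pointer member S.P,
   which here must merely refresh the device copy of the pointer
   value instead of re-attaching or detaching it. */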
11274 if (code == OACC_UPDATE
11275 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11276 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11278 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11280 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11281 == ARRAY_TYPE)
11282 remove = true;
11283 else
11285 gomp_map_kind k = ((code == OACC_EXIT_DATA
11286 || code == OMP_TARGET_EXIT_DATA)
11287 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11288 OMP_CLAUSE_SET_MAP_KIND (c, k);
11292 tree cref = decl;
11294 while (TREE_CODE (cref) == ARRAY_REF)
11295 cref = TREE_OPERAND (cref, 0);
11297 if (TREE_CODE (cref) == INDIRECT_REF)
11298 cref = TREE_OPERAND (cref, 0);
11300 if (TREE_CODE (cref) == COMPONENT_REF)
11302 tree base = cref;
11303 while (base && !DECL_P (base))
11305 tree innerbase = omp_get_base_pointer (base);
11306 if (!innerbase)
11307 break;
11308 base = innerbase;
11310 if (base
11311 && DECL_P (base)
11312 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11313 && POINTER_TYPE_P (TREE_TYPE (base)))
11315 splay_tree_node n
11316 = splay_tree_lookup (ctx->variables,
11317 (splay_tree_key) base);
11318 n->value |= GOVD_SEEN;
11322 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11324 /* Don't gimplify *pd fully at this point, as the base
11325 will need to be adjusted during omp lowering. */
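/* E.g. for "map (tofrom: s.a[i])" under an in_reduction
   (illustrative only), the walk below gimplifies only the variable
   pieces -- indices, low bounds, element sizes and field offsets --
   while keeping the reference chain itself intact. */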
11326 auto_vec<tree, 10> expr_stack;
11327 tree *p = pd;
11328 while (handled_component_p (*p)
11329 || TREE_CODE (*p) == INDIRECT_REF
11330 || TREE_CODE (*p) == ADDR_EXPR
11331 || TREE_CODE (*p) == MEM_REF
11332 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11334 expr_stack.safe_push (*p);
11335 p = &TREE_OPERAND (*p, 0);
11337 for (int i = expr_stack.length () - 1; i >= 0; i--)
11339 tree t = expr_stack[i];
11340 if (TREE_CODE (t) == ARRAY_REF
11341 || TREE_CODE (t) == ARRAY_RANGE_REF)
11343 if (TREE_OPERAND (t, 2) == NULL_TREE)
11345 tree low = unshare_expr (array_ref_low_bound (t));
11346 if (!is_gimple_min_invariant (low))
11348 TREE_OPERAND (t, 2) = low;
11349 if (gimplify_expr (&TREE_OPERAND (t, 2),
11350 pre_p, NULL,
11351 is_gimple_reg,
11352 fb_rvalue) == GS_ERROR)
11353 remove = true;
11356 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11357 NULL, is_gimple_reg,
11358 fb_rvalue) == GS_ERROR)
11359 remove = true;
11360 if (TREE_OPERAND (t, 3) == NULL_TREE)
11362 tree elmt_size = array_ref_element_size (t);
11363 if (!is_gimple_min_invariant (elmt_size))
11365 elmt_size = unshare_expr (elmt_size);
11366 tree elmt_type
11367 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11368 0)));
11369 tree factor
11370 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11371 elmt_size
11372 = size_binop (EXACT_DIV_EXPR, elmt_size,
11373 factor);
11374 TREE_OPERAND (t, 3) = elmt_size;
11375 if (gimplify_expr (&TREE_OPERAND (t, 3),
11376 pre_p, NULL,
11377 is_gimple_reg,
11378 fb_rvalue) == GS_ERROR)
11379 remove = true;
11382 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11383 NULL, is_gimple_reg,
11384 fb_rvalue) == GS_ERROR)
11385 remove = true;
11387 else if (TREE_CODE (t) == COMPONENT_REF)
11389 if (TREE_OPERAND (t, 2) == NULL_TREE)
11391 tree offset = component_ref_field_offset (t);
11392 if (!is_gimple_min_invariant (offset))
11394 offset = unshare_expr (offset);
11395 tree field = TREE_OPERAND (t, 1);
11396 tree factor
11397 = size_int (DECL_OFFSET_ALIGN (field)
11398 / BITS_PER_UNIT);
11399 offset = size_binop (EXACT_DIV_EXPR, offset,
11400 factor);
11401 TREE_OPERAND (t, 2) = offset;
11402 if (gimplify_expr (&TREE_OPERAND (t, 2),
11403 pre_p, NULL,
11404 is_gimple_reg,
11405 fb_rvalue) == GS_ERROR)
11406 remove = true;
11409 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11410 NULL, is_gimple_reg,
11411 fb_rvalue) == GS_ERROR)
11412 remove = true;
11415 for (; expr_stack.length () > 0; )
11417 tree t = expr_stack.pop ();
11419 if (TREE_CODE (t) == ARRAY_REF
11420 || TREE_CODE (t) == ARRAY_RANGE_REF)
11422 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11423 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11424 NULL, is_gimple_val,
11425 fb_rvalue) == GS_ERROR)
11426 remove = true;
11430 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11431 fb_lvalue) == GS_ERROR)
11433 remove = true;
11434 break;
11437 if (!remove
11438 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
11439 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
11440 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
11441 && OMP_CLAUSE_CHAIN (c)
11442 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
11443 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11444 == GOMP_MAP_ALWAYS_POINTER)
11445 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11446 == GOMP_MAP_ATTACH_DETACH)
11447 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11448 == GOMP_MAP_TO_PSET)))
11449 prev_list_p = list_p;
11451 break;
11453 flags = GOVD_MAP | GOVD_EXPLICIT;
11454 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11455 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11456 flags |= GOVD_MAP_ALWAYS_TO;
11458 if ((code == OMP_TARGET
11459 || code == OMP_TARGET_DATA
11460 || code == OMP_TARGET_ENTER_DATA
11461 || code == OMP_TARGET_EXIT_DATA)
11462 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11464 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11465 octx = octx->outer_context)
11467 splay_tree_node n
11468 = splay_tree_lookup (octx->variables,
11469 (splay_tree_key) OMP_CLAUSE_DECL (c));
11470 /* If this is contained in an outer OpenMP region as a
11471 firstprivate value, remove the attach/detach. */
11472 if (n && (n->value & GOVD_FIRSTPRIVATE))
11474 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11475 goto do_add;
11479 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11480 ? GOMP_MAP_DETACH
11481 : GOMP_MAP_ATTACH);
11482 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11485 goto do_add;
11487 case OMP_CLAUSE_AFFINITY:
11488 gimplify_omp_affinity (list_p, pre_p);
11489 remove = true;
11490 break;
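/* Illustrative only: cross-iteration dependences such as

     #pragma omp ordered doacross (sink: i - 1, j)

   carry a list of iteration offsets; any non-constant parts of
   those offsets are gimplified below, while "doacross (source:)"
   has no operands to process. */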
11491 case OMP_CLAUSE_DOACROSS:
11492 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11494 tree deps = OMP_CLAUSE_DECL (c);
11495 while (deps && TREE_CODE (deps) == TREE_LIST)
11497 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11498 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11499 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11500 pre_p, NULL, is_gimple_val, fb_rvalue);
11501 deps = TREE_CHAIN (deps);
11504 else
11505 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11506 == OMP_CLAUSE_DOACROSS_SOURCE);
11507 break;
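/* Illustrative only: for a dependence like

     #pragma omp task depend (inout: a[i])

   the runtime needs the address of the object, so the clause decl
   is rewritten to &a[i] and gimplified to a value below; iterator
   forms were already expanded by gimplify_omp_depend. */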
11508 case OMP_CLAUSE_DEPEND:
11509 if (handled_depend_iterators == -1)
11510 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11511 if (handled_depend_iterators)
11513 if (handled_depend_iterators == 2)
11514 remove = true;
11515 break;
11517 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11519 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11520 NULL, is_gimple_val, fb_rvalue);
11521 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11523 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11525 remove = true;
11526 break;
11528 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11530 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11531 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11532 is_gimple_val, fb_rvalue) == GS_ERROR)
11534 remove = true;
11535 break;
11538 if (code == OMP_TASK)
11539 ctx->has_depend = true;
11540 break;
11542 case OMP_CLAUSE_TO:
11543 case OMP_CLAUSE_FROM:
11544 case OMP_CLAUSE__CACHE_:
11545 decl = OMP_CLAUSE_DECL (c);
11546 if (error_operand_p (decl))
11548 remove = true;
11549 break;
11551 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11552 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11553 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11554 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11555 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11557 remove = true;
11558 break;
11560 if (!DECL_P (decl))
11562 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11563 NULL, is_gimple_lvalue, fb_lvalue)
11564 == GS_ERROR)
11566 remove = true;
11567 break;
11569 break;
11571 goto do_notice;
11573 case OMP_CLAUSE_USE_DEVICE_PTR:
11574 case OMP_CLAUSE_USE_DEVICE_ADDR:
11575 flags = GOVD_EXPLICIT;
11576 goto do_add;
11578 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11579 decl = OMP_CLAUSE_DECL (c);
11580 while (TREE_CODE (decl) == INDIRECT_REF
11581 || TREE_CODE (decl) == ARRAY_REF)
11582 decl = TREE_OPERAND (decl, 0);
11583 flags = GOVD_EXPLICIT;
11584 goto do_add_decl;
11586 case OMP_CLAUSE_IS_DEVICE_PTR:
11587 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11588 goto do_add;
11590 do_add:
11591 decl = OMP_CLAUSE_DECL (c);
11592 do_add_decl:
11593 if (error_operand_p (decl))
11595 remove = true;
11596 break;
11598 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11600 tree t = omp_member_access_dummy_var (decl);
11601 if (t)
11603 tree v = DECL_VALUE_EXPR (decl);
11604 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11605 if (outer_ctx)
11606 omp_notice_variable (outer_ctx, t, true);
11609 if (code == OACC_DATA
11610 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11611 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11612 flags |= GOVD_MAP_0LEN_ARRAY;
11613 omp_add_variable (ctx, decl, flags);
11614 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11615 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11616 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11617 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11619 struct gimplify_omp_ctx *pctx
11620 = code == OMP_TARGET ? outer_ctx : ctx;
11621 if (pctx)
11622 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11623 GOVD_LOCAL | GOVD_SEEN);
11624 if (pctx
11625 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11626 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11627 find_decl_expr,
11628 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11629 NULL) == NULL_TREE)
11630 omp_add_variable (pctx,
11631 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11632 GOVD_LOCAL | GOVD_SEEN);
11633 gimplify_omp_ctxp = pctx;
11634 push_gimplify_context ();
11636 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11637 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11639 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11640 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11641 pop_gimplify_context
11642 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11643 push_gimplify_context ();
11644 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11645 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11646 pop_gimplify_context
11647 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11648 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11649 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11651 gimplify_omp_ctxp = outer_ctx;
11653 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11654 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11656 gimplify_omp_ctxp = ctx;
11657 push_gimplify_context ();
11658 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11660 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11661 NULL, NULL);
11662 TREE_SIDE_EFFECTS (bind) = 1;
11663 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11664 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11666 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11667 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11668 pop_gimplify_context
11669 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11670 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11672 gimplify_omp_ctxp = outer_ctx;
11674 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11675 && OMP_CLAUSE_LINEAR_STMT (c))
11677 gimplify_omp_ctxp = ctx;
11678 push_gimplify_context ();
11679 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11681 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11682 NULL, NULL);
11683 TREE_SIDE_EFFECTS (bind) = 1;
11684 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11685 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11687 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11688 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11689 pop_gimplify_context
11690 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11691 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11693 gimplify_omp_ctxp = outer_ctx;
11695 if (notice_outer)
11696 goto do_notice;
11697 break;
11699 case OMP_CLAUSE_COPYIN:
11700 case OMP_CLAUSE_COPYPRIVATE:
11701 decl = OMP_CLAUSE_DECL (c);
11702 if (error_operand_p (decl))
11704 remove = true;
11705 break;
11707 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11708 && !remove
11709 && !omp_check_private (ctx, decl, true))
11711 remove = true;
11712 if (is_global_var (decl))
11714 if (DECL_THREAD_LOCAL_P (decl))
11715 remove = false;
11716 else if (DECL_HAS_VALUE_EXPR_P (decl))
11718 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11720 if (value
11721 && DECL_P (value)
11722 && DECL_THREAD_LOCAL_P (value))
11723 remove = false;
11726 if (remove)
11727 error_at (OMP_CLAUSE_LOCATION (c),
11728 "copyprivate variable %qE is not threadprivate"
11729 " or private in outer context", DECL_NAME (decl));
11731 do_notice:
11732 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11733 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11734 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11735 && outer_ctx
11736 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11737 || (region_type == ORT_WORKSHARE
11738 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11739 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11740 || code == OMP_LOOP)))
11741 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11742 || (code == OMP_LOOP
11743 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11744 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11745 == ORT_COMBINED_TEAMS))))
11747 splay_tree_node on
11748 = splay_tree_lookup (outer_ctx->variables,
11749 (splay_tree_key)decl);
11750 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11752 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11753 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11754 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11755 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11756 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11757 == POINTER_TYPE))))
11758 omp_firstprivatize_variable (outer_ctx, decl);
11759 else
11761 omp_add_variable (outer_ctx, decl,
11762 GOVD_SEEN | GOVD_SHARED);
11763 if (outer_ctx->outer_context)
11764 omp_notice_variable (outer_ctx->outer_context, decl,
11765 true);
11769 if (outer_ctx)
11770 omp_notice_variable (outer_ctx, decl, true);
11771 if (check_non_private
11772 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11773 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11774 || decl == OMP_CLAUSE_DECL (c)
11775 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11776 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11777 == ADDR_EXPR
11778 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11779 == POINTER_PLUS_EXPR
11780 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11781 (OMP_CLAUSE_DECL (c), 0), 0))
11782 == ADDR_EXPR)))))
11783 && omp_check_private (ctx, decl, false))
11785 error ("%s variable %qE is private in outer context",
11786 check_non_private, DECL_NAME (decl));
11787 remove = true;
11789 break;
11791 case OMP_CLAUSE_DETACH:
11792 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11793 goto do_add;
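/* Illustrative only: on a combined construct the "if" clause may
   name the directive it applies to, e.g.

     #pragma omp target parallel if (target: cond)

   By the time each leaf construct is gimplified, a clause whose
   modifier does not match CODE is a user error, diagnosed below. */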
11795 case OMP_CLAUSE_IF:
11796 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11797 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11799 const char *p[2];
11800 for (int i = 0; i < 2; i++)
11801 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11803 case VOID_CST: p[i] = "cancel"; break;
11804 case OMP_PARALLEL: p[i] = "parallel"; break;
11805 case OMP_SIMD: p[i] = "simd"; break;
11806 case OMP_TASK: p[i] = "task"; break;
11807 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11808 case OMP_TARGET_DATA: p[i] = "target data"; break;
11809 case OMP_TARGET: p[i] = "target"; break;
11810 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11811 case OMP_TARGET_ENTER_DATA:
11812 p[i] = "target enter data"; break;
11813 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11814 default: gcc_unreachable ();
11816 error_at (OMP_CLAUSE_LOCATION (c),
11817 "expected %qs %<if%> clause modifier rather than %qs",
11818 p[0], p[1]);
11819 remove = true;
11821 /* Fall through. */
11823 case OMP_CLAUSE_FINAL:
11824 OMP_CLAUSE_OPERAND (c, 0)
11825 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11826 /* Fall through. */
11828 case OMP_CLAUSE_NUM_TEAMS:
11829 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11830 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11831 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11833 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11835 remove = true;
11836 break;
11838 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11839 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11840 pre_p, NULL, true);
11842 /* Fall through. */
11844 case OMP_CLAUSE_SCHEDULE:
11845 case OMP_CLAUSE_NUM_THREADS:
11846 case OMP_CLAUSE_THREAD_LIMIT:
11847 case OMP_CLAUSE_DIST_SCHEDULE:
11848 case OMP_CLAUSE_DEVICE:
11849 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11850 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11852 if (code != OMP_TARGET)
11854 error_at (OMP_CLAUSE_LOCATION (c),
11855 "%<device%> clause with %<ancestor%> is only "
11856 "allowed on %<target%> construct");
11857 remove = true;
11858 break;
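/* Illustrative only: reverse offload such as

     #pragma omp target device (ancestor: 1) map (to: x)

   additionally restricts the sibling clauses; besides "device",
   only "firstprivate", "private", "defaultmap" and "map" may
   appear, which the scan below enforces. */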
11861 tree clauses = *orig_list_p;
11862 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11863 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11864 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11865 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11866 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11867 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
11870 error_at (OMP_CLAUSE_LOCATION (c),
11871 "with %<ancestor%>, only the %<device%>, "
11872 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11873 "and %<map%> clauses may appear on the "
11874 "construct");
11875 remove = true;
11876 break;
11879 /* Fall through. */
11881 case OMP_CLAUSE_PRIORITY:
11882 case OMP_CLAUSE_GRAINSIZE:
11883 case OMP_CLAUSE_NUM_TASKS:
11884 case OMP_CLAUSE_FILTER:
11885 case OMP_CLAUSE_HINT:
11886 case OMP_CLAUSE_ASYNC:
11887 case OMP_CLAUSE_WAIT:
11888 case OMP_CLAUSE_NUM_GANGS:
11889 case OMP_CLAUSE_NUM_WORKERS:
11890 case OMP_CLAUSE_VECTOR_LENGTH:
11891 case OMP_CLAUSE_WORKER:
11892 case OMP_CLAUSE_VECTOR:
11893 if (OMP_CLAUSE_OPERAND (c, 0)
11894 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
11896 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
11898 remove = true;
11899 break;
11901 /* All these clauses care about the value, not a particular decl,
11902 so try to force it into an SSA_NAME or a fresh temporary. */
11903 OMP_CLAUSE_OPERAND (c, 0)
11904 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
11905 pre_p, NULL, true);
11907 break;
11909 case OMP_CLAUSE_GANG:
11910 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
11911 is_gimple_val, fb_rvalue) == GS_ERROR)
11912 remove = true;
11913 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
11914 is_gimple_val, fb_rvalue) == GS_ERROR)
11915 remove = true;
11916 break;
11918 case OMP_CLAUSE_NOWAIT:
11919 nowait = 1;
11920 break;
11922 case OMP_CLAUSE_ORDERED:
11923 case OMP_CLAUSE_UNTIED:
11924 case OMP_CLAUSE_COLLAPSE:
11925 case OMP_CLAUSE_TILE:
11926 case OMP_CLAUSE_AUTO:
11927 case OMP_CLAUSE_SEQ:
11928 case OMP_CLAUSE_INDEPENDENT:
11929 case OMP_CLAUSE_MERGEABLE:
11930 case OMP_CLAUSE_PROC_BIND:
11931 case OMP_CLAUSE_SAFELEN:
11932 case OMP_CLAUSE_SIMDLEN:
11933 case OMP_CLAUSE_NOGROUP:
11934 case OMP_CLAUSE_THREADS:
11935 case OMP_CLAUSE_SIMD:
11936 case OMP_CLAUSE_BIND:
11937 case OMP_CLAUSE_IF_PRESENT:
11938 case OMP_CLAUSE_FINALIZE:
11939 break;
11941 case OMP_CLAUSE_ORDER:
11942 ctx->order_concurrent = true;
11943 break;
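/* Illustrative only: a clause like

     #pragma omp target defaultmap (firstprivate: scalar)

   overrides the implicit data-mapping rules per variable category;
   the nested switches below translate each behavior/category pair
   into the GOVD_* defaults recorded in ctx->defaultmap. */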
11945 case OMP_CLAUSE_DEFAULTMAP:
11946 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
11947 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
11949 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
11950 gdmkmin = GDMK_SCALAR;
11951 gdmkmax = GDMK_POINTER;
11952 break;
11953 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
11954 gdmkmin = GDMK_SCALAR;
11955 gdmkmax = GDMK_SCALAR_TARGET;
11956 break;
11957 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
11958 gdmkmin = gdmkmax = GDMK_AGGREGATE;
11959 break;
11960 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
11961 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
11962 break;
11963 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
11964 gdmkmin = gdmkmax = GDMK_POINTER;
11965 break;
11966 default:
11967 gcc_unreachable ();
11969 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
11970 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
11972 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
11973 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
11974 break;
11975 case OMP_CLAUSE_DEFAULTMAP_TO:
11976 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
11977 break;
11978 case OMP_CLAUSE_DEFAULTMAP_FROM:
11979 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
11980 break;
11981 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
11982 ctx->defaultmap[gdmk] = GOVD_MAP;
11983 break;
11984 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
11985 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11986 break;
11987 case OMP_CLAUSE_DEFAULTMAP_NONE:
11988 ctx->defaultmap[gdmk] = 0;
11989 break;
11990 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
11991 switch (gdmk)
11993 case GDMK_SCALAR:
11994 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11995 break;
11996 case GDMK_SCALAR_TARGET:
11997 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
11998 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11999 break;
12000 case GDMK_AGGREGATE:
12001 case GDMK_ALLOCATABLE:
12002 ctx->defaultmap[gdmk] = GOVD_MAP;
12003 break;
12004 case GDMK_POINTER:
12005 ctx->defaultmap[gdmk] = GOVD_MAP;
12006 if (!lang_GNU_Fortran ())
12007 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
12008 break;
12009 default:
12010 gcc_unreachable ();
12012 break;
12013 default:
12014 gcc_unreachable ();
12016 break;
12018 case OMP_CLAUSE_ALIGNED:
12019 decl = OMP_CLAUSE_DECL (c);
12020 if (error_operand_p (decl))
12022 remove = true;
12023 break;
12025 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12026 is_gimple_val, fb_rvalue) == GS_ERROR)
12028 remove = true;
12029 break;
12031 if (!is_global_var (decl)
12032 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12033 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12034 break;
12036 case OMP_CLAUSE_NONTEMPORAL:
12037 decl = OMP_CLAUSE_DECL (c);
12038 if (error_operand_p (decl))
12040 remove = true;
12041 break;
12043 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12044 break;
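/* Illustrative only: for

     omp_allocator_handle_t al = omp_init_allocator (...);
     #pragma omp task allocate (al: x) firstprivate (x)

   the allocator expression is gimplified to a value; on taskloop,
   or when it is not a plain decl, it is also evaluated once into a
   temporary below. */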
12046 case OMP_CLAUSE_ALLOCATE:
12047 decl = OMP_CLAUSE_DECL (c);
12048 if (error_operand_p (decl))
12050 remove = true;
12051 break;
12053 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12054 is_gimple_val, fb_rvalue) == GS_ERROR)
12056 remove = true;
12057 break;
12059 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12060 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12061 == INTEGER_CST))
12063 else if (code == OMP_TASKLOOP
12064 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12065 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12066 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12067 pre_p, NULL, false);
12068 break;
12070 case OMP_CLAUSE_DEFAULT:
12071 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12072 break;
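/* Illustrative only: "inclusive"/"exclusive" appear on the "scan"
   directive, e.g.

     #pragma omp scan inclusive (sum)

   and must name a variable from the enclosing construct's
   "reduction (inscan, ...)" clause; the lookup below verifies that
   and marks the reduction with GOVD_REDUCTION_INSCAN. */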
12074 case OMP_CLAUSE_INCLUSIVE:
12075 case OMP_CLAUSE_EXCLUSIVE:
12076 decl = OMP_CLAUSE_DECL (c);
12078 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12079 (splay_tree_key) decl);
12080 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12082 error_at (OMP_CLAUSE_LOCATION (c),
12083 "%qD specified in %qs clause but not in %<inscan%> "
12084 "%<reduction%> clause on the containing construct",
12085 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12086 remove = true;
12088 else
12090 n->value |= GOVD_REDUCTION_INSCAN;
12091 if (outer_ctx->region_type == ORT_SIMD
12092 && outer_ctx->outer_context
12093 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12095 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12096 (splay_tree_key) decl);
12097 if (n && (n->value & GOVD_REDUCTION) != 0)
12098 n->value |= GOVD_REDUCTION_INSCAN;
12102 break;
12104 case OMP_CLAUSE_NOHOST:
12105 default:
12106 gcc_unreachable ();
12109 if (code == OACC_DATA
12110 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12111 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12112 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12113 remove = true;
12114 if (remove)
12115 *list_p = OMP_CLAUSE_CHAIN (c);
12116 else
12117 list_p = &OMP_CLAUSE_CHAIN (c);
12120 ctx->clauses = *orig_list_p;
12121 gimplify_omp_ctxp = ctx;
12124 /* Return true if DECL is a candidate for shared to firstprivate
12125 optimization. We only consider non-addressable scalars that are
12126 not too big and are not references. */
12128 static bool
12129 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12131 if (TREE_ADDRESSABLE (decl))
12132 return false;
12133 tree type = TREE_TYPE (decl);
12134 if (!is_gimple_reg_type (type)
12135 || TREE_CODE (type) == REFERENCE_TYPE
12136 || TREE_ADDRESSABLE (type))
12137 return false;
12138 /* Don't optimize too large decls, as each thread/task will have
12139 its own. */
12140 HOST_WIDE_INT len = int_size_in_bytes (type);
12141 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12142 return false;
12143 if (omp_privatize_by_reference (decl))
12144 return false;
12145 return true;
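/* A minimal sketch of the effect (illustrative only): in

     int x = 42;
     #pragma omp task shared (x)
     use (x);

   X is a small non-addressable scalar, so if the region never
   stores to it the shared clause can be marked read-only
   (OMP_CLAUSE_SHARED_READONLY below), allowing cheaper handling
   than full sharing. */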
12148 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12149 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
12150 GOVD_WRITTEN in outer contexts. */
12152 static void
12153 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12155 for (; ctx; ctx = ctx->outer_context)
12157 splay_tree_node n = splay_tree_lookup (ctx->variables,
12158 (splay_tree_key) decl);
12159 if (n == NULL)
12160 continue;
12161 else if (n->value & GOVD_SHARED)
12163 n->value |= GOVD_WRITTEN;
12164 return;
12166 else if (n->value & GOVD_DATA_SHARE_CLASS)
12167 return;
12171 /* Helper callback for walk_gimple_seq to discover possible stores
12172 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12173 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12174 for those. */
12176 static tree
12177 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12179 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12181 *walk_subtrees = 0;
12182 if (!wi->is_lhs)
12183 return NULL_TREE;
12185 tree op = *tp;
12188 if (handled_component_p (op))
12189 op = TREE_OPERAND (op, 0);
12190 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12191 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12192 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12193 else
12194 break;
12196 while (1);
12197 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12198 return NULL_TREE;
12200 omp_mark_stores (gimplify_omp_ctxp, op);
12201 return NULL_TREE;
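/* For instance (illustrative only), for a store "s.f[i] = v;" the
   loop above peels the ARRAY_REF and COMPONENT_REF down to the base
   decl S before marking it via omp_mark_stores. */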
12204 /* Helper callback for walk_gimple_seq to discover possible stores
12205 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12206 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12207 for those. */
12209 static tree
12210 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12211 bool *handled_ops_p,
12212 struct walk_stmt_info *wi)
12214 gimple *stmt = gsi_stmt (*gsi_p);
12215 switch (gimple_code (stmt))
12217 /* Don't recurse on OpenMP constructs for which
12218 gimplify_adjust_omp_clauses already handled the bodies,
12219 except handle gimple_omp_for_pre_body. */
12220 case GIMPLE_OMP_FOR:
12221 *handled_ops_p = true;
12222 if (gimple_omp_for_pre_body (stmt))
12223 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12224 omp_find_stores_stmt, omp_find_stores_op, wi);
12225 break;
12226 case GIMPLE_OMP_PARALLEL:
12227 case GIMPLE_OMP_TASK:
12228 case GIMPLE_OMP_SECTIONS:
12229 case GIMPLE_OMP_SINGLE:
12230 case GIMPLE_OMP_SCOPE:
12231 case GIMPLE_OMP_TARGET:
12232 case GIMPLE_OMP_TEAMS:
12233 case GIMPLE_OMP_CRITICAL:
12234 *handled_ops_p = true;
12235 break;
12236 default:
12237 break;
12239 return NULL_TREE;
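/* Arguments handed to gimplify_adjust_omp_clauses_1 when it is
   walked over a context's variable table via splay_tree_foreach. */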
12242 struct gimplify_adjust_omp_clauses_data
12244 tree *list_p;
12245 gimple_seq *pre_p;
12248 /* For all variables that were not actually used within the context,
12249 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12251 static int
12252 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12254 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12255 gimple_seq *pre_p
12256 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12257 tree decl = (tree) n->key;
12258 unsigned flags = n->value;
12259 enum omp_clause_code code;
12260 tree clause;
12261 bool private_debug;
12263 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12264 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12265 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12266 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12267 return 0;
12268 if ((flags & GOVD_SEEN) == 0)
12269 return 0;
12270 if (flags & GOVD_DEBUG_PRIVATE)
12272 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12273 private_debug = true;
12275 else if (flags & GOVD_MAP)
12276 private_debug = false;
12277 else
12278 private_debug
12279 = lang_hooks.decls.omp_private_debug_clause (decl,
12280 !!(flags & GOVD_SHARED));
12281 if (private_debug)
12282 code = OMP_CLAUSE_PRIVATE;
12283 else if (flags & GOVD_MAP)
12285 code = OMP_CLAUSE_MAP;
12286 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12287 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12289 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12290 return 0;
12292 if (VAR_P (decl)
12293 && DECL_IN_CONSTANT_POOL (decl)
12294 && !lookup_attribute ("omp declare target",
12295 DECL_ATTRIBUTES (decl)))
12297 tree id = get_identifier ("omp declare target");
12298 DECL_ATTRIBUTES (decl)
12299 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12300 varpool_node *node = varpool_node::get (decl);
12301 if (node)
12303 node->offloadable = 1;
12304 if (ENABLE_OFFLOADING)
12305 g->have_offload = true;
12309 else if (flags & GOVD_SHARED)
12311 if (is_global_var (decl))
12313 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12314 while (ctx != NULL)
12316 splay_tree_node on
12317 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12318 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12319 | GOVD_PRIVATE | GOVD_REDUCTION
12320 | GOVD_LINEAR | GOVD_MAP)) != 0)
12321 break;
12322 ctx = ctx->outer_context;
12324 if (ctx == NULL)
12325 return 0;
12327 code = OMP_CLAUSE_SHARED;
12328 /* Don't optimize shared into firstprivate for read-only vars
12329 on tasks with a depend clause; we shouldn't try to copy them
12330 until the dependencies are satisfied. */
12331 if (gimplify_omp_ctxp->has_depend)
12332 flags |= GOVD_WRITTEN;
12334 else if (flags & GOVD_PRIVATE)
12335 code = OMP_CLAUSE_PRIVATE;
12336 else if (flags & GOVD_FIRSTPRIVATE)
12338 code = OMP_CLAUSE_FIRSTPRIVATE;
12339 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12340 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12341 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12343 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12344 "%<target%> construct", decl);
12345 return 0;
12348 else if (flags & GOVD_LASTPRIVATE)
12349 code = OMP_CLAUSE_LASTPRIVATE;
12350 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12351 return 0;
12352 else if (flags & GOVD_CONDTEMP)
12354 code = OMP_CLAUSE__CONDTEMP_;
12355 gimple_add_tmp_var (decl);
12357 else
12358 gcc_unreachable ();
12360 if (((flags & GOVD_LASTPRIVATE)
12361 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12362 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12363 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12365 tree chain = *list_p;
12366 clause = build_omp_clause (input_location, code);
12367 OMP_CLAUSE_DECL (clause) = decl;
12368 OMP_CLAUSE_CHAIN (clause) = chain;
12369 if (private_debug)
12370 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12371 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12372 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12373 else if (code == OMP_CLAUSE_SHARED
12374 && (flags & GOVD_WRITTEN) == 0
12375 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12376 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12377 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12378 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
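/* Zero-length array section case (illustrative only): e.g. a pointer
   used inside "#pragma acc data" without its pointee being mapped.
   Build a zero-sized GOMP_MAP_ALLOC of *DECL, marked as a
   maybe-zero-length section, plus a GOMP_MAP_FIRSTPRIVATE_POINTER
   node for DECL itself. */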
12379 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12381 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12382 OMP_CLAUSE_DECL (nc) = decl;
12383 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12384 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12385 OMP_CLAUSE_DECL (clause)
12386 = build_simple_mem_ref_loc (input_location, decl);
12387 OMP_CLAUSE_DECL (clause)
12388 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12389 build_int_cst (build_pointer_type (char_type_node), 0));
12390 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12391 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12392 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12393 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12394 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12395 OMP_CLAUSE_CHAIN (nc) = chain;
12396 OMP_CLAUSE_CHAIN (clause) = nc;
12397 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12398 gimplify_omp_ctxp = ctx->outer_context;
12399 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12400 pre_p, NULL, is_gimple_val, fb_rvalue);
12401 gimplify_omp_ctxp = ctx;
12403 else if (code == OMP_CLAUSE_MAP)
12405 int kind;
12406 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12407 switch (flags & (GOVD_MAP_TO_ONLY
12408 | GOVD_MAP_FORCE
12409 | GOVD_MAP_FORCE_PRESENT
12410 | GOVD_MAP_ALLOC_ONLY
12411 | GOVD_MAP_FROM_ONLY))
12413 case 0:
12414 kind = GOMP_MAP_TOFROM;
12415 break;
12416 case GOVD_MAP_FORCE:
12417 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12418 break;
12419 case GOVD_MAP_TO_ONLY:
12420 kind = GOMP_MAP_TO;
12421 break;
12422 case GOVD_MAP_FROM_ONLY:
12423 kind = GOMP_MAP_FROM;
12424 break;
12425 case GOVD_MAP_ALLOC_ONLY:
12426 kind = GOMP_MAP_ALLOC;
12427 break;
12428 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12429 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12430 break;
12431 case GOVD_MAP_FORCE_PRESENT:
12432 kind = GOMP_MAP_FORCE_PRESENT;
12433 break;
12434 default:
12435 gcc_unreachable ();
12437 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12438 /* Setting of the implicit flag for the runtime is currently disabled for
12439 OpenACC. */
12440 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12441 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12442 if (DECL_SIZE (decl)
12443 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12445 tree decl2 = DECL_VALUE_EXPR (decl);
12446 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12447 decl2 = TREE_OPERAND (decl2, 0);
12448 gcc_assert (DECL_P (decl2));
12449 tree mem = build_simple_mem_ref (decl2);
12450 OMP_CLAUSE_DECL (clause) = mem;
12451 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12452 if (gimplify_omp_ctxp->outer_context)
12454 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12455 omp_notice_variable (ctx, decl2, true);
12456 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12458 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12459 OMP_CLAUSE_MAP);
12460 OMP_CLAUSE_DECL (nc) = decl;
12461 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12462 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12463 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12464 else
12465 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12466 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12467 OMP_CLAUSE_CHAIN (clause) = nc;
12469 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12470 && omp_privatize_by_reference (decl))
12472 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12473 OMP_CLAUSE_SIZE (clause)
12474 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12475 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12476 gimplify_omp_ctxp = ctx->outer_context;
12477 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12478 pre_p, NULL, is_gimple_val, fb_rvalue);
12479 gimplify_omp_ctxp = ctx;
12480 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12481 OMP_CLAUSE_MAP);
12482 OMP_CLAUSE_DECL (nc) = decl;
12483 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12484 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12485 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12486 OMP_CLAUSE_CHAIN (clause) = nc;
12488 else
12489 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12491 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12493 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12494 OMP_CLAUSE_DECL (nc) = decl;
12495 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12496 OMP_CLAUSE_CHAIN (nc) = chain;
12497 OMP_CLAUSE_CHAIN (clause) = nc;
12498 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12499 gimplify_omp_ctxp = ctx->outer_context;
12500 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12501 (ctx->region_type & ORT_ACC) != 0);
12502 gimplify_omp_ctxp = ctx;
12504 *list_p = clause;
12505 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12506 gimplify_omp_ctxp = ctx->outer_context;
12507 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12508 in simd. Those are only added for the local vars inside the simd body,
12509 and they don't need to be e.g. default constructible. */
12510 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12511 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12512 (ctx->region_type & ORT_ACC) != 0);
12513 if (gimplify_omp_ctxp)
12514 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12515 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12516 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12517 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12518 true);
12519 gimplify_omp_ctxp = ctx;
12520 return 0;
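/* Scan *LIST_P again once BODY has been gimplified: prune clauses
   for variables that were never actually used in the region, adjust
   map kinds and sizes now that the whole construct is known, and
   diagnose remaining invalid combinations. CODE is the construct
   being gimplified. */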
12523 static void
12524 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12525 enum tree_code code)
12527 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12528 tree *orig_list_p = list_p;
12529 tree c, decl;
12530 bool has_inscan_reductions = false;
12532 if (body)
12534 struct gimplify_omp_ctx *octx;
12535 for (octx = ctx; octx; octx = octx->outer_context)
12536 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12537 break;
12538 if (octx)
12540 struct walk_stmt_info wi;
12541 memset (&wi, 0, sizeof (wi));
12542 walk_gimple_seq (body, omp_find_stores_stmt,
12543 omp_find_stores_op, &wi);
12547 if (ctx->add_safelen1)
12549 /* If there are VLAs in the body of the simd loop, prevent
12550 vectorization. */
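/* Illustrative only:

     #pragma omp simd
     for (int i = 0; i < n; i++)
       { char buf[m]; ... }

   the variable-length BUF cannot be replicated per SIMD lane, so an
   artificial "safelen (1)" clause is prepended to keep the loop
   from being vectorized. */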
12551 gcc_assert (ctx->region_type == ORT_SIMD);
12552 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12553 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12554 OMP_CLAUSE_CHAIN (c) = *list_p;
12555 *list_p = c;
12556 list_p = &OMP_CLAUSE_CHAIN (c);
12559 if (ctx->region_type == ORT_WORKSHARE
12560 && ctx->outer_context
12561 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12563 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12564 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12565 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12567 decl = OMP_CLAUSE_DECL (c);
12568 splay_tree_node n
12569 = splay_tree_lookup (ctx->outer_context->variables,
12570 (splay_tree_key) decl);
12571 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12572 (splay_tree_key) decl));
12573 omp_add_variable (ctx, decl, n->value);
12574 tree c2 = copy_node (c);
12575 OMP_CLAUSE_CHAIN (c2) = *list_p;
12576 *list_p = c2;
12577 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12578 continue;
12579 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12580 OMP_CLAUSE_FIRSTPRIVATE);
12581 OMP_CLAUSE_DECL (c2) = decl;
12582 OMP_CLAUSE_CHAIN (c2) = *list_p;
12583 *list_p = c2;
12587 tree attach_list = NULL_TREE;
12588 tree *attach_tail = &attach_list;
12590 while ((c = *list_p) != NULL)
12592 splay_tree_node n;
12593 bool remove = false;
12594 bool move_attach = false;
12596 switch (OMP_CLAUSE_CODE (c))
12598 case OMP_CLAUSE_FIRSTPRIVATE:
12599 if ((ctx->region_type & ORT_TARGET)
12600 && (ctx->region_type & ORT_ACC) == 0
12601 && TYPE_ATOMIC (strip_array_types
12602 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12604 error_at (OMP_CLAUSE_LOCATION (c),
12605 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12606 "%<target%> construct", OMP_CLAUSE_DECL (c));
12607 remove = true;
12608 break;
12610 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12612 decl = OMP_CLAUSE_DECL (c);
12613 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12614 if ((n->value & GOVD_MAP) != 0)
12616 remove = true;
12617 break;
12619 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12620 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12622 /* FALLTHRU */
12623 case OMP_CLAUSE_PRIVATE:
12624 case OMP_CLAUSE_SHARED:
12625 case OMP_CLAUSE_LINEAR:
12626 decl = OMP_CLAUSE_DECL (c);
12627 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12628 remove = !(n->value & GOVD_SEEN);
12629 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12630 && code == OMP_PARALLEL
12631 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12632 remove = true;
12633 if (! remove)
12635 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12636 if ((n->value & GOVD_DEBUG_PRIVATE)
12637 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12639 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12640 || ((n->value & GOVD_DATA_SHARE_CLASS)
12641 == GOVD_SHARED));
12642 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12643 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12645 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12646 && ctx->has_depend
12647 && DECL_P (decl))
12648 n->value |= GOVD_WRITTEN;
12649 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12650 && (n->value & GOVD_WRITTEN) == 0
12651 && DECL_P (decl)
12652 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12653 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12654 else if (DECL_P (decl)
12655 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12656 && (n->value & GOVD_WRITTEN) != 0)
12657 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12658 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12659 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12660 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12662 else
12663 n->value &= ~GOVD_EXPLICIT;
12664 break;
12666 case OMP_CLAUSE_LASTPRIVATE:
12667 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12668 accurately reflect the presence of a FIRSTPRIVATE clause. */
12669 decl = OMP_CLAUSE_DECL (c);
12670 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12671 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12672 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12673 if (code == OMP_DISTRIBUTE
12674 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12676 remove = true;
12677 error_at (OMP_CLAUSE_LOCATION (c),
12678 "same variable used in %<firstprivate%> and "
12679 "%<lastprivate%> clauses on %<distribute%> "
12680 "construct");
12682 if (!remove
12683 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12684 && DECL_P (decl)
12685 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12686 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12687 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12688 remove = true;
12689 break;
12691 case OMP_CLAUSE_ALIGNED:
12692 decl = OMP_CLAUSE_DECL (c);
12693 if (!is_global_var (decl))
12695 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12696 remove = n == NULL || !(n->value & GOVD_SEEN);
12697 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12699 struct gimplify_omp_ctx *octx;
12700 if (n != NULL
12701 && (n->value & (GOVD_DATA_SHARE_CLASS
12702 & ~GOVD_FIRSTPRIVATE)))
12703 remove = true;
12704 else
12705 for (octx = ctx->outer_context; octx;
12706 octx = octx->outer_context)
12708 n = splay_tree_lookup (octx->variables,
12709 (splay_tree_key) decl);
12710 if (n == NULL)
12711 continue;
12712 if (n->value & GOVD_LOCAL)
12713 break;
12714 /* We have to avoid assigning a shared variable
12715 to itself when trying to add
12716 __builtin_assume_aligned. */
12717 if (n->value & GOVD_SHARED)
12719 remove = true;
12720 break;
12725 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12727 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12728 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12729 remove = true;
12731 break;
12733 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12734 decl = OMP_CLAUSE_DECL (c);
12735 while (TREE_CODE (decl) == INDIRECT_REF
12736 || TREE_CODE (decl) == ARRAY_REF)
12737 decl = TREE_OPERAND (decl, 0);
12738 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12739 remove = n == NULL || !(n->value & GOVD_SEEN);
12740 break;
12742 case OMP_CLAUSE_IS_DEVICE_PTR:
12743 case OMP_CLAUSE_NONTEMPORAL:
12744 decl = OMP_CLAUSE_DECL (c);
12745 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12746 remove = n == NULL || !(n->value & GOVD_SEEN);
12747 break;
12749 case OMP_CLAUSE_MAP:
12750 if (code == OMP_TARGET_EXIT_DATA
12751 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12753 remove = true;
12754 break;
12756 /* If we have a target region, we can push all the attaches to the
12757 end of the list (we may have standalone "attach" operations
12758 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12759 the attachment point AND the pointed-to block have been mapped).
12760 If we have something else, e.g. "enter data", we need to keep
12761 "attach" nodes together with the previous node they attach to so
12762 that separate "exit data" operations work properly (see
12763 libgomp/target.c). */
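/* Illustrative only: for "map (to: s.p[0:n])" on a target region,
   the synthesized attach of S.P must run after both the struct and
   the pointed-to block are mapped, hence the move to the tail of
   the list; on "enter data" the attach instead has to stay next to
   the node it attaches to. */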
12764 if ((ctx->region_type & ORT_TARGET) != 0
12765 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12766 || (OMP_CLAUSE_MAP_KIND (c)
12767 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12768 move_attach = true;
12769 decl = OMP_CLAUSE_DECL (c);
12770 /* Data clauses associated with reductions must be
12771 compatible with present_or_copy. Warn and adjust the clause
12772 if that is not the case. */
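/* Illustrative only:

     #pragma acc parallel copyin (sum) reduction (+:sum)

   a reduction result must be copied back out, so the "copyin" is
   diagnosed below and promoted to present_or_copy semantics
   (GOMP_MAP_TOFROM). */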
12773 if (ctx->region_type == ORT_ACC_PARALLEL
12774 || ctx->region_type == ORT_ACC_SERIAL)
12776 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12777 n = NULL;
12779 if (DECL_P (t))
12780 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12782 if (n && (n->value & GOVD_REDUCTION))
12784 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12786 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12787 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12788 && kind != GOMP_MAP_FORCE_PRESENT
12789 && kind != GOMP_MAP_POINTER)
12791 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12792 "incompatible data clause with reduction "
12793 "on %qE; promoting to %<present_or_copy%>",
12794 DECL_NAME (t));
12795 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
12799 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12800 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12802 remove = true;
12803 break;
12805 if (!DECL_P (decl))
12807 if ((ctx->region_type & ORT_TARGET) != 0
12808 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12810 if (TREE_CODE (decl) == INDIRECT_REF
12811 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12812 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12813 == REFERENCE_TYPE))
12814 decl = TREE_OPERAND (decl, 0);
12815 if (TREE_CODE (decl) == COMPONENT_REF)
12817 while (TREE_CODE (decl) == COMPONENT_REF)
12818 decl = TREE_OPERAND (decl, 0);
12819 if (DECL_P (decl))
12821 n = splay_tree_lookup (ctx->variables,
12822 (splay_tree_key) decl);
12823 if (!(n->value & GOVD_SEEN))
12824 remove = true;
12828 break;
12830 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12831 if ((ctx->region_type & ORT_TARGET) != 0
12832 && !(n->value & GOVD_SEEN)
12833 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12834 && (!is_global_var (decl)
12835 || !lookup_attribute ("omp declare target link",
12836 DECL_ATTRIBUTES (decl))))
12838 remove = true;
12839 /* For struct element mapping, if the struct is never referenced
12840 in the target block and none of the mappings has the always
12841 modifier, remove all the struct element mappings, which
12842 immediately follow the GOMP_MAP_STRUCT map clause. */
12843 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12845 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12846 while (cnt--)
12847 OMP_CLAUSE_CHAIN (c)
12848 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
12851 else if (DECL_SIZE (decl)
12852 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
12853 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
12854 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
12855 && (OMP_CLAUSE_MAP_KIND (c)
12856 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12858 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12859 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12860 INTEGER_CST. */
12861 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
12863 tree decl2 = DECL_VALUE_EXPR (decl);
12864 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12865 decl2 = TREE_OPERAND (decl2, 0);
12866 gcc_assert (DECL_P (decl2));
12867 tree mem = build_simple_mem_ref (decl2);
12868 OMP_CLAUSE_DECL (c) = mem;
12869 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12870 if (ctx->outer_context)
12872 omp_notice_variable (ctx->outer_context, decl2, true);
12873 omp_notice_variable (ctx->outer_context,
12874 OMP_CLAUSE_SIZE (c), true);
12876 if (((ctx->region_type & ORT_TARGET) != 0
12877 || !ctx->target_firstprivatize_array_bases)
12878 && ((n->value & GOVD_SEEN) == 0
12879 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
12881 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12882 OMP_CLAUSE_MAP);
12883 OMP_CLAUSE_DECL (nc) = decl;
12884 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12885 if (ctx->target_firstprivatize_array_bases)
12886 OMP_CLAUSE_SET_MAP_KIND (nc,
12887 GOMP_MAP_FIRSTPRIVATE_POINTER);
12888 else
12889 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12890 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
12891 OMP_CLAUSE_CHAIN (c) = nc;
12892 c = nc;
12895 else
12897 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12898 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12899 gcc_assert ((n->value & GOVD_SEEN) == 0
12900 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12901 == 0));
12903 break;
12905 case OMP_CLAUSE_TO:
12906 case OMP_CLAUSE_FROM:
12907 case OMP_CLAUSE__CACHE_:
12908 decl = OMP_CLAUSE_DECL (c);
12909 if (!DECL_P (decl))
12910 break;
12911 if (DECL_SIZE (decl)
12912 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12914 tree decl2 = DECL_VALUE_EXPR (decl);
12915 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12916 decl2 = TREE_OPERAND (decl2, 0);
12917 gcc_assert (DECL_P (decl2));
12918 tree mem = build_simple_mem_ref (decl2);
12919 OMP_CLAUSE_DECL (c) = mem;
12920 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12921 if (ctx->outer_context)
12923 omp_notice_variable (ctx->outer_context, decl2, true);
12924 omp_notice_variable (ctx->outer_context,
12925 OMP_CLAUSE_SIZE (c), true);
12928 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12929 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12930 break;
12932 case OMP_CLAUSE_REDUCTION:
12933 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12935 decl = OMP_CLAUSE_DECL (c);
12936 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12937 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
12939 remove = true;
12940 error_at (OMP_CLAUSE_LOCATION (c),
12941 "%qD specified in %<inscan%> %<reduction%> clause "
12942 "but not in %<scan%> directive clause", decl);
12943 break;
12945 has_inscan_reductions = true;
12947 /* FALLTHRU */
12948 case OMP_CLAUSE_IN_REDUCTION:
12949 case OMP_CLAUSE_TASK_REDUCTION:
12950 decl = OMP_CLAUSE_DECL (c);
12951 /* OpenACC reductions need a present_or_copy data clause.
12952 Add one if necessary. Emit an error when the reduction is private. */
12953 if (ctx->region_type == ORT_ACC_PARALLEL
12954 || ctx->region_type == ORT_ACC_SERIAL)
12956 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12957 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12959 remove = true;
12960 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
12961 "reduction on %qE", DECL_NAME (decl));
12963 else if ((n->value & GOVD_MAP) == 0)
12965 tree next = OMP_CLAUSE_CHAIN (c);
12966 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
12967 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
12968 OMP_CLAUSE_DECL (nc) = decl;
12969 OMP_CLAUSE_CHAIN (c) = nc;
12970 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12971 (ctx->region_type
12972 & ORT_ACC) != 0);
12973 while (1)
12975 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
12976 if (OMP_CLAUSE_CHAIN (nc) == NULL)
12977 break;
12978 nc = OMP_CLAUSE_CHAIN (nc);
12980 OMP_CLAUSE_CHAIN (nc) = next;
12981 n->value |= GOVD_MAP;
12984 if (DECL_P (decl)
12985 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12986 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12987 break;
12989 case OMP_CLAUSE_ALLOCATE:
12990 decl = OMP_CLAUSE_DECL (c);
12991 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12992 if (n != NULL && !(n->value & GOVD_SEEN))
12994 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
12995 != 0
12996 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
12997 remove = true;
12999 if (!remove
13000 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13001 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
13002 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
13003 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
13004 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
13006 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13007 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
13008 if (n == NULL)
13010 enum omp_clause_default_kind default_kind
13011 = ctx->default_kind;
13012 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
13013 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13014 true);
13015 ctx->default_kind = default_kind;
13017 else
13018 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
13019 true);
13021 break;
13023 case OMP_CLAUSE_COPYIN:
13024 case OMP_CLAUSE_COPYPRIVATE:
13025 case OMP_CLAUSE_IF:
13026 case OMP_CLAUSE_NUM_THREADS:
13027 case OMP_CLAUSE_NUM_TEAMS:
13028 case OMP_CLAUSE_THREAD_LIMIT:
13029 case OMP_CLAUSE_DIST_SCHEDULE:
13030 case OMP_CLAUSE_DEVICE:
13031 case OMP_CLAUSE_SCHEDULE:
13032 case OMP_CLAUSE_NOWAIT:
13033 case OMP_CLAUSE_ORDERED:
13034 case OMP_CLAUSE_DEFAULT:
13035 case OMP_CLAUSE_UNTIED:
13036 case OMP_CLAUSE_COLLAPSE:
13037 case OMP_CLAUSE_FINAL:
13038 case OMP_CLAUSE_MERGEABLE:
13039 case OMP_CLAUSE_PROC_BIND:
13040 case OMP_CLAUSE_SAFELEN:
13041 case OMP_CLAUSE_SIMDLEN:
13042 case OMP_CLAUSE_DEPEND:
13043 case OMP_CLAUSE_DOACROSS:
13044 case OMP_CLAUSE_PRIORITY:
13045 case OMP_CLAUSE_GRAINSIZE:
13046 case OMP_CLAUSE_NUM_TASKS:
13047 case OMP_CLAUSE_NOGROUP:
13048 case OMP_CLAUSE_THREADS:
13049 case OMP_CLAUSE_SIMD:
13050 case OMP_CLAUSE_FILTER:
13051 case OMP_CLAUSE_HINT:
13052 case OMP_CLAUSE_DEFAULTMAP:
13053 case OMP_CLAUSE_ORDER:
13054 case OMP_CLAUSE_BIND:
13055 case OMP_CLAUSE_DETACH:
13056 case OMP_CLAUSE_USE_DEVICE_PTR:
13057 case OMP_CLAUSE_USE_DEVICE_ADDR:
13058 case OMP_CLAUSE_ASYNC:
13059 case OMP_CLAUSE_WAIT:
13060 case OMP_CLAUSE_INDEPENDENT:
13061 case OMP_CLAUSE_NUM_GANGS:
13062 case OMP_CLAUSE_NUM_WORKERS:
13063 case OMP_CLAUSE_VECTOR_LENGTH:
13064 case OMP_CLAUSE_GANG:
13065 case OMP_CLAUSE_WORKER:
13066 case OMP_CLAUSE_VECTOR:
13067 case OMP_CLAUSE_AUTO:
13068 case OMP_CLAUSE_SEQ:
13069 case OMP_CLAUSE_TILE:
13070 case OMP_CLAUSE_IF_PRESENT:
13071 case OMP_CLAUSE_FINALIZE:
13072 case OMP_CLAUSE_INCLUSIVE:
13073 case OMP_CLAUSE_EXCLUSIVE:
13074 break;
13076 case OMP_CLAUSE_NOHOST:
13077 default:
13078 gcc_unreachable ();
13081 if (remove)
13082 *list_p = OMP_CLAUSE_CHAIN (c);
13083 else if (move_attach)
13085 /* Remove the attach node from here and separate it out into its own list. */
13086 *attach_tail = c;
13087 *list_p = OMP_CLAUSE_CHAIN (c);
13088 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13089 attach_tail = &OMP_CLAUSE_CHAIN (c);
13091 else
13092 list_p = &OMP_CLAUSE_CHAIN (c);
13095 /* Splice attach nodes at the end of the list. */
13096 if (attach_list)
13098 *list_p = attach_list;
13099 list_p = attach_tail;
13102 /* Add in any implicit data sharing. */
13103 struct gimplify_adjust_omp_clauses_data data;
13104 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13106 /* OpenMP. Implicit clauses are added at the start of the clause list,
13107 but after any non-map clauses. */
13108 tree *implicit_add_list_p = orig_list_p;
13109 while (*implicit_add_list_p
13110 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13111 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13112 data.list_p = implicit_add_list_p;
13114 else
13115 /* OpenACC. */
13116 data.list_p = list_p;
13117 data.pre_p = pre_p;
13118 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13120 if (has_inscan_reductions)
13121 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13122 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13123 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13125 error_at (OMP_CLAUSE_LOCATION (c),
13126 "%<inscan%> %<reduction%> clause used together with "
13127 "%<linear%> clause for a variable other than loop "
13128 "iterator");
13129 break;
13132 gimplify_omp_ctxp = ctx->outer_context;
13133 delete_omp_context (ctx);
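/* Illustrative note (example only, not from the surrounding code): with
   map clauses in source order such as
     map(to:s) attach(s.ptr) map(from:b)
   the attach node is unlinked into attach_list above and spliced back
   after the remaining clauses, roughly yielding
     map(to:s) map(from:b) attach(s.ptr)
   (the clause spelling here is schematic). */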
13136 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
13137 -1 if it is unknown yet (simd is involved; that won't be known until
13138 vectorization) and 1 if they do match. If SCORES is non-NULL, it
13139 should point to an array of at least 2*NCONSTRUCTS+2 ints, which will
13140 be filled with the positions of the CONSTRUCTS (position -1 if one
13141 will never match) followed by the number of constructs in the OpenMP
13142 context construct trait. If the score depends on whether the code
13143 will be in a declare simd clone or not, the function returns 2 and
13144 there will be two sets of scores: the first for the case that it is
13145 not in a declare simd clone, the second for the case that it is. */
13147 int
13148 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13149 int *scores)
13151 int matched = 0, cnt = 0;
13152 bool simd_seen = false;
13153 bool target_seen = false;
13154 int declare_simd_cnt = -1;
13155 auto_vec<enum tree_code, 16> codes;
13156 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13158 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13159 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13160 == ORT_TARGET && ctx->code == OMP_TARGET)
13161 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13162 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13163 || (ctx->region_type == ORT_SIMD
13164 && ctx->code == OMP_SIMD
13165 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13167 ++cnt;
13168 if (scores)
13169 codes.safe_push (ctx->code);
13170 else if (matched < nconstructs && ctx->code == constructs[matched])
13172 if (ctx->code == OMP_SIMD)
13174 if (matched)
13175 return 0;
13176 simd_seen = true;
13178 ++matched;
13180 if (ctx->code == OMP_TARGET)
13182 if (scores == NULL)
13183 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13184 target_seen = true;
13185 break;
13188 else if (ctx->region_type == ORT_WORKSHARE
13189 && ctx->code == OMP_LOOP
13190 && ctx->outer_context
13191 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13192 && ctx->outer_context->outer_context
13193 && ctx->outer_context->outer_context->code == OMP_LOOP
13194 && ctx->outer_context->outer_context->distribute)
13195 ctx = ctx->outer_context->outer_context;
13196 ctx = ctx->outer_context;
13198 if (!target_seen
13199 && lookup_attribute ("omp declare simd",
13200 DECL_ATTRIBUTES (current_function_decl)))
13202 /* Declare simd is a maybe case; it is supposed to be added only to the
13203 clones created by omp-simd-clone.cc and not to the base function. */
13204 declare_simd_cnt = cnt++;
13205 if (scores)
13206 codes.safe_push (OMP_SIMD);
13207 else if (cnt == 0
13208 && constructs[0] == OMP_SIMD)
13210 gcc_assert (matched == 0);
13211 simd_seen = true;
13212 if (++matched == nconstructs)
13213 return -1;
13216 if (tree attr = lookup_attribute ("omp declare variant variant",
13217 DECL_ATTRIBUTES (current_function_decl)))
13219 enum tree_code variant_constructs[5];
13220 int variant_nconstructs = 0;
13221 if (!target_seen)
13222 variant_nconstructs
13223 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13224 variant_constructs);
13225 for (int i = 0; i < variant_nconstructs; i++)
13227 ++cnt;
13228 if (scores)
13229 codes.safe_push (variant_constructs[i]);
13230 else if (matched < nconstructs
13231 && variant_constructs[i] == constructs[matched])
13233 if (variant_constructs[i] == OMP_SIMD)
13235 if (matched)
13236 return 0;
13237 simd_seen = true;
13239 ++matched;
13243 if (!target_seen
13244 && lookup_attribute ("omp declare target block",
13245 DECL_ATTRIBUTES (current_function_decl)))
13247 if (scores)
13248 codes.safe_push (OMP_TARGET);
13249 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13250 ++matched;
13252 if (scores)
13254 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13256 int j = codes.length () - 1;
13257 for (int i = nconstructs - 1; i >= 0; i--)
13259 while (j >= 0
13260 && (pass != 0 || declare_simd_cnt != j)
13261 && constructs[i] != codes[j])
13262 --j;
13263 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13264 *scores++ = j - 1;
13265 else
13266 *scores++ = j;
13268 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13269 ? codes.length () - 1 : codes.length ());
13271 return declare_simd_cnt == -1 ? 1 : 2;
13273 if (matched == nconstructs)
13274 return simd_seen ? -1 : 1;
13275 return 0;
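/* Illustrative note (example only): inside
     #pragma omp target
     #pragma omp parallel
   calling this function with CONSTRUCTS = { OMP_PARALLEL } and
   NCONSTRUCTS = 1 yields 1, since "parallel" appears in the context
   construct trait; a CONSTRUCTS entry that can only match through a
   simd context yields -1 (unknown until vectorization), and one that
   cannot match at all yields 0. */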
13278 /* Gimplify OACC_CACHE. */
13280 static void
13281 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13283 tree expr = *expr_p;
13285 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13286 OACC_CACHE);
13287 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13288 OACC_CACHE);
13290 /* TODO: Do something sensible with this information. */
13292 *expr_p = NULL_TREE;
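/* Illustrative note (example only): for
     #pragma acc cache (a[0:n])
   the clauses are scanned and adjusted like those of other OpenACC
   constructs, but per the TODO above the directive itself is then
   dropped and no GIMPLE statement is emitted for it. */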
13295 /* Helper function of gimplify_oacc_declare. Its purpose is, if
13296 required, to translate the 'kind' in CLAUSE into an 'entry' kind and an
13297 'exit' kind. The entry kind will replace the one in CLAUSE, while the
13298 exit kind will be used in a new omp_clause and returned to the caller. */
13300 static tree
13301 gimplify_oacc_declare_1 (tree clause)
13303 HOST_WIDE_INT kind, new_op;
13304 bool ret = false;
13305 tree c = NULL;
13307 kind = OMP_CLAUSE_MAP_KIND (clause);
13309 switch (kind)
13311 case GOMP_MAP_ALLOC:
13312 new_op = GOMP_MAP_RELEASE;
13313 ret = true;
13314 break;
13316 case GOMP_MAP_FROM:
13317 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13318 new_op = GOMP_MAP_FROM;
13319 ret = true;
13320 break;
13322 case GOMP_MAP_TOFROM:
13323 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13324 new_op = GOMP_MAP_FROM;
13325 ret = true;
13326 break;
13328 case GOMP_MAP_DEVICE_RESIDENT:
13329 case GOMP_MAP_FORCE_DEVICEPTR:
13330 case GOMP_MAP_FORCE_PRESENT:
13331 case GOMP_MAP_LINK:
13332 case GOMP_MAP_POINTER:
13333 case GOMP_MAP_TO:
13334 break;
13336 default:
13337 gcc_unreachable ();
13338 break;
13341 if (ret)
13343 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13344 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13345 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13348 return c;
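/* Illustrative note (example only): a "copy" clause (GOMP_MAP_TOFROM)
   on "acc declare" is rewritten to GOMP_MAP_TO for the entry point and
   a new GOMP_MAP_FROM clause is returned for the exit point; likewise
   "copyout" (GOMP_MAP_FROM) becomes GOMP_MAP_FORCE_ALLOC on entry with
   GOMP_MAP_FROM on exit, and GOMP_MAP_ALLOC is paired with a returned
   GOMP_MAP_RELEASE clause. */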
13351 /* Gimplify OACC_DECLARE. */
13353 static void
13354 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13356 tree expr = *expr_p;
13357 gomp_target *stmt;
13358 tree clauses, t, decl;
13360 clauses = OACC_DECLARE_CLAUSES (expr);
13362 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13363 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13365 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13367 decl = OMP_CLAUSE_DECL (t);
13369 if (TREE_CODE (decl) == MEM_REF)
13370 decl = TREE_OPERAND (decl, 0);
13372 if (VAR_P (decl) && !is_oacc_declared (decl))
13374 tree attr = get_identifier ("oacc declare target");
13375 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13376 DECL_ATTRIBUTES (decl));
13379 if (VAR_P (decl)
13380 && !is_global_var (decl)
13381 && DECL_CONTEXT (decl) == current_function_decl)
13383 tree c = gimplify_oacc_declare_1 (t);
13384 if (c)
13386 if (oacc_declare_returns == NULL)
13387 oacc_declare_returns = new hash_map<tree, tree>;
13389 oacc_declare_returns->put (decl, c);
13393 if (gimplify_omp_ctxp)
13394 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13397 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13398 clauses);
13400 gimplify_seq_add_stmt (pre_p, stmt);
13402 *expr_p = NULL_TREE;
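/* Illustrative note (example only): for a function-local variable
     int a[8];
     #pragma acc declare copy (a)
   the decl gains the "oacc declare target" attribute, the entry mapping
   is emitted here as a GF_OMP_TARGET_KIND_OACC_DECLARE statement, and
   the exit clause built by gimplify_oacc_declare_1 is recorded in
   oacc_declare_returns to be emitted on function return. */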
13405 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13406 gimplification of the body, as well as scanning the body for used
13407 variables. We need to do this scan now, because variable-sized
13408 decls will be decomposed during gimplification. */
13410 static void
13411 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13413 tree expr = *expr_p;
13414 gimple *g;
13415 gimple_seq body = NULL;
13417 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13418 OMP_PARALLEL_COMBINED (expr)
13419 ? ORT_COMBINED_PARALLEL
13420 : ORT_PARALLEL, OMP_PARALLEL);
13422 push_gimplify_context ();
13424 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13425 if (gimple_code (g) == GIMPLE_BIND)
13426 pop_gimplify_context (g);
13427 else
13428 pop_gimplify_context (NULL);
13430 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13431 OMP_PARALLEL);
13433 g = gimple_build_omp_parallel (body,
13434 OMP_PARALLEL_CLAUSES (expr),
13435 NULL_TREE, NULL_TREE);
13436 if (OMP_PARALLEL_COMBINED (expr))
13437 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13438 gimplify_seq_add_stmt (pre_p, g);
13439 *expr_p = NULL_TREE;
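/* Illustrative note (example only):
     #pragma omp parallel num_threads (4)
       body;
   arrives here as an OMP_PARALLEL tree; the body is gimplified in its
   own context, the clauses are scanned beforehand and then adjusted
   against the variable uses found while gimplifying, and a single
   GIMPLE_OMP_PARALLEL wrapping the new body is appended to PRE_P. */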
13442 /* Gimplify the contents of an OMP_TASK statement. This involves
13443 gimplification of the body, as well as scanning the body for used
13444 variables. We need to do this scan now, because variable-sized
13445 decls will be decomposed during gimplification. */
13447 static void
13448 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13450 tree expr = *expr_p;
13451 gimple *g;
13452 gimple_seq body = NULL;
13453 bool nowait = false;
13454 bool has_depend = false;
13456 if (OMP_TASK_BODY (expr) == NULL_TREE)
13458 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13459 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13461 has_depend = true;
13462 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13464 error_at (OMP_CLAUSE_LOCATION (c),
13465 "%<mutexinoutset%> kind in %<depend%> clause on a "
13466 "%<taskwait%> construct");
13467 break;
13470 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13471 nowait = true;
13472 if (nowait && !has_depend)
13474 error_at (EXPR_LOCATION (expr),
13475 "%<taskwait%> construct with %<nowait%> clause but no "
13476 "%<depend%> clauses");
13477 *expr_p = NULL_TREE;
13478 return;
13482 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13483 omp_find_clause (OMP_TASK_CLAUSES (expr),
13484 OMP_CLAUSE_UNTIED)
13485 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13487 if (OMP_TASK_BODY (expr))
13489 push_gimplify_context ();
13491 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13492 if (gimple_code (g) == GIMPLE_BIND)
13493 pop_gimplify_context (g);
13494 else
13495 pop_gimplify_context (NULL);
13498 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13499 OMP_TASK);
13501 g = gimple_build_omp_task (body,
13502 OMP_TASK_CLAUSES (expr),
13503 NULL_TREE, NULL_TREE,
13504 NULL_TREE, NULL_TREE, NULL_TREE);
13505 if (OMP_TASK_BODY (expr) == NULL_TREE)
13506 gimple_omp_task_set_taskwait_p (g, true);
13507 gimplify_seq_add_stmt (pre_p, g);
13508 *expr_p = NULL_TREE;
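/* Illustrative note (example only): an OMP_TASK with a NULL body
   represents a construct like
     #pragma omp taskwait depend (in: x)
   and is emitted as a GIMPLE_OMP_TASK with taskwait_p set, while a
   regular "#pragma omp task" gimplifies its body first and wraps it in
   the same statement kind. */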
13511 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13512 force it into a temporary initialized in PRE_P and add a firstprivate
13513 clause for it to ORIG_FOR_STMT. */
13515 static void
13516 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13517 tree orig_for_stmt)
13519 if (*tp == NULL || is_gimple_constant (*tp))
13520 return;
13522 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13523 /* A reference-to-pointer conversion is considered useless,
13524 but it is significant for the firstprivate clause. Force it
13525 here. */
13526 if (type
13527 && TREE_CODE (type) == POINTER_TYPE
13528 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13530 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13531 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13532 gimplify_and_add (m, pre_p);
13533 *tp = v;
13536 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13537 OMP_CLAUSE_DECL (c) = *tp;
13538 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13539 OMP_FOR_CLAUSES (orig_for_stmt) = c;
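/* Illustrative note (example only): for
     #pragma omp taskloop
     for (i = 0; i < n * 2; i++)
   the bound "n * 2" is not a gimple constant, so it is evaluated into a
   temporary ahead of the construct and that temporary is added as a
   firstprivate clause on ORIG_FOR_STMT, so the generated task sees its
   value. */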
13542 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
13543 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
13545 static tree
13546 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13548 switch (TREE_CODE (*tp))
13550 case OMP_ORDERED:
13551 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13552 return *tp;
13553 break;
13554 case OMP_SIMD:
13555 case OMP_PARALLEL:
13556 case OMP_TARGET:
13557 *walk_subtrees = 0;
13558 break;
13559 default:
13560 break;
13562 return NULL_TREE;
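/* Illustrative note (example only): a stand-alone
     #pragma omp ordered depend (sink: i - 1)
   has a NULL OMP_ORDERED_BODY and is what this walker detects; the walk
   clears *WALK_SUBTREES for nested OMP_SIMD, OMP_PARALLEL and
   OMP_TARGET so that ordered constructs belonging to inner regions are
   not mistaken for ones nested in this loop. */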
13565 /* Gimplify the gross structure of an OMP_FOR statement. */
13567 static enum gimplify_status
13568 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13570 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13571 enum gimplify_status ret = GS_ALL_DONE;
13572 enum gimplify_status tret;
13573 gomp_for *gfor;
13574 gimple_seq for_body, for_pre_body;
13575 int i;
13576 bitmap has_decl_expr = NULL;
13577 enum omp_region_type ort = ORT_WORKSHARE;
13578 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13580 orig_for_stmt = for_stmt = *expr_p;
13582 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13583 != NULL_TREE);
13584 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13586 tree *data[4] = { NULL, NULL, NULL, NULL };
13587 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13588 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13589 find_combined_omp_for, data, NULL);
13590 if (inner_for_stmt == NULL_TREE)
13592 gcc_assert (seen_error ());
13593 *expr_p = NULL_TREE;
13594 return GS_ERROR;
13596 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13598 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13599 &OMP_FOR_PRE_BODY (for_stmt));
13600 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13602 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13604 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13605 &OMP_FOR_PRE_BODY (for_stmt));
13606 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13609 if (data[0])
13611 /* We have some statements or variable declarations in between
13612 the composite construct directives. Move them around the
13613 inner_for_stmt. */
13614 data[0] = expr_p;
13615 for (i = 0; i < 3; i++)
13616 if (data[i])
13618 tree t = *data[i];
13619 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13620 data[i + 1] = data[i];
13621 *data[i] = OMP_BODY (t);
13622 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13623 NULL_TREE, make_node (BLOCK));
13624 OMP_BODY (t) = body;
13625 append_to_statement_list_force (inner_for_stmt,
13626 &BIND_EXPR_BODY (body));
13627 *data[3] = t;
13628 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13629 gcc_assert (*data[3] == inner_for_stmt);
13631 return GS_OK;
13634 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13635 if (!loop_p
13636 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13637 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13638 i)) == TREE_LIST
13639 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13640 i)))
13642 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13643 /* Class iterators aren't allowed on OMP_SIMD, so the only
13644 case we need to solve is distribute parallel for. They are
13645 allowed on the loop construct, but that is already handled
13646 in gimplify_omp_loop. */
13647 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13648 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13649 && data[1]);
13650 tree orig_decl = TREE_PURPOSE (orig);
13651 tree last = TREE_VALUE (orig);
13652 tree *pc;
13653 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13654 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13655 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13656 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13657 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13658 break;
13659 if (*pc == NULL_TREE)
13661 tree *spc;
13662 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13663 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13664 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13665 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13666 break;
13667 if (*spc)
13669 tree c = *spc;
13670 *spc = OMP_CLAUSE_CHAIN (c);
13671 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13672 *pc = c;
13675 if (*pc == NULL_TREE)
13677 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13679 /* A private clause will appear only on inner_for_stmt.
13680 Change it into firstprivate, and add a private clause
13681 on for_stmt. */
13682 tree c = copy_node (*pc);
13683 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13684 OMP_FOR_CLAUSES (for_stmt) = c;
13685 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13686 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13688 else
13690 /* A lastprivate clause will appear on both inner_for_stmt
13691 and for_stmt. Add a firstprivate clause to
13692 inner_for_stmt. */
13693 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13694 OMP_CLAUSE_FIRSTPRIVATE);
13695 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13696 OMP_CLAUSE_CHAIN (c) = *pc;
13697 *pc = c;
13698 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13700 tree c = build_omp_clause (UNKNOWN_LOCATION,
13701 OMP_CLAUSE_FIRSTPRIVATE);
13702 OMP_CLAUSE_DECL (c) = last;
13703 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13704 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13705 c = build_omp_clause (UNKNOWN_LOCATION,
13706 *pc ? OMP_CLAUSE_SHARED
13707 : OMP_CLAUSE_FIRSTPRIVATE);
13708 OMP_CLAUSE_DECL (c) = orig_decl;
13709 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13710 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13712 /* Similarly, take care of C++ range-for temporaries; those should
13713 be firstprivate on OMP_PARALLEL, if any. */
13714 if (data[1])
13715 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13716 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13717 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13718 i)) == TREE_LIST
13719 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13720 i)))
13722 tree orig
13723 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13724 tree v = TREE_CHAIN (orig);
13725 tree c = build_omp_clause (UNKNOWN_LOCATION,
13726 OMP_CLAUSE_FIRSTPRIVATE);
13727 /* First add firstprivate clause for the __for_end artificial
13728 decl. */
13729 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13730 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13731 == REFERENCE_TYPE)
13732 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13733 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13734 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13735 if (TREE_VEC_ELT (v, 0))
13737 /* And now the same for __for_range artificial decl if it
13738 exists. */
13739 c = build_omp_clause (UNKNOWN_LOCATION,
13740 OMP_CLAUSE_FIRSTPRIVATE);
13741 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13742 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13743 == REFERENCE_TYPE)
13744 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13745 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13746 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13751 switch (TREE_CODE (for_stmt))
13753 case OMP_FOR:
13754 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13756 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13757 OMP_CLAUSE_SCHEDULE))
13758 error_at (EXPR_LOCATION (for_stmt),
13759 "%qs clause may not appear on non-rectangular %qs",
13760 "schedule", lang_GNU_Fortran () ? "do" : "for");
13761 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13762 error_at (EXPR_LOCATION (for_stmt),
13763 "%qs clause may not appear on non-rectangular %qs",
13764 "ordered", lang_GNU_Fortran () ? "do" : "for");
13766 break;
13767 case OMP_DISTRIBUTE:
13768 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13769 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13770 OMP_CLAUSE_DIST_SCHEDULE))
13771 error_at (EXPR_LOCATION (for_stmt),
13772 "%qs clause may not appear on non-rectangular %qs",
13773 "dist_schedule", "distribute");
13774 break;
13775 case OACC_LOOP:
13776 ort = ORT_ACC;
13777 break;
13778 case OMP_TASKLOOP:
13779 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13781 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13782 OMP_CLAUSE_GRAINSIZE))
13783 error_at (EXPR_LOCATION (for_stmt),
13784 "%qs clause may not appear on non-rectangular %qs",
13785 "grainsize", "taskloop");
13786 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13787 OMP_CLAUSE_NUM_TASKS))
13788 error_at (EXPR_LOCATION (for_stmt),
13789 "%qs clause may not appear on non-rectangular %qs",
13790 "num_tasks", "taskloop");
13792 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13793 ort = ORT_UNTIED_TASKLOOP;
13794 else
13795 ort = ORT_TASKLOOP;
13796 break;
13797 case OMP_SIMD:
13798 ort = ORT_SIMD;
13799 break;
13800 default:
13801 gcc_unreachable ();
13804 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
13805 clause for the IV. */
13806 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13808 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13809 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13810 decl = TREE_OPERAND (t, 0);
13811 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13812 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13813 && OMP_CLAUSE_DECL (c) == decl)
13815 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13816 break;
13820 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13821 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13822 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13823 ? OMP_LOOP : TREE_CODE (for_stmt));
13825 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13826 gimplify_omp_ctxp->distribute = true;
13828 /* Handle OMP_FOR_INIT. */
13829 for_pre_body = NULL;
13830 if ((ort == ORT_SIMD
13831 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13832 && OMP_FOR_PRE_BODY (for_stmt))
13834 has_decl_expr = BITMAP_ALLOC (NULL);
13835 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13836 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
13837 == VAR_DECL)
13839 t = OMP_FOR_PRE_BODY (for_stmt);
13840 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13842 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13844 tree_stmt_iterator si;
13845 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13846 tsi_next (&si))
13848 t = tsi_stmt (si);
13849 if (TREE_CODE (t) == DECL_EXPR
13850 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
13851 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13855 if (OMP_FOR_PRE_BODY (for_stmt))
13857 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
13858 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13859 else
13861 struct gimplify_omp_ctx ctx;
13862 memset (&ctx, 0, sizeof (ctx));
13863 ctx.region_type = ORT_NONE;
13864 gimplify_omp_ctxp = &ctx;
13865 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13866 gimplify_omp_ctxp = NULL;
13869 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
13871 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13872 for_stmt = inner_for_stmt;
13874 /* For taskloop, we need to gimplify the start, end and step before the
13875 taskloop, outside of the taskloop omp context. */
13876 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13878 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13880 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13881 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
13882 ? pre_p : &for_pre_body);
13883 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
13884 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13886 tree v = TREE_OPERAND (t, 1);
13887 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13888 for_pre_p, orig_for_stmt);
13889 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13890 for_pre_p, orig_for_stmt);
13892 else
13893 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13894 orig_for_stmt);
13896 /* Handle OMP_FOR_COND. */
13897 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13898 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13900 tree v = TREE_OPERAND (t, 1);
13901 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13902 for_pre_p, orig_for_stmt);
13903 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13904 for_pre_p, orig_for_stmt);
13906 else
13907 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13908 orig_for_stmt);
13910 /* Handle OMP_FOR_INCR. */
13911 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13912 if (TREE_CODE (t) == MODIFY_EXPR)
13914 decl = TREE_OPERAND (t, 0);
13915 t = TREE_OPERAND (t, 1);
13916 tree *tp = &TREE_OPERAND (t, 1);
13917 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
13918 tp = &TREE_OPERAND (t, 0);
13920 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
13921 orig_for_stmt);
13925 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
13926 OMP_TASKLOOP);
13929 if (orig_for_stmt != for_stmt)
13930 gimplify_omp_ctxp->combined_loop = true;
13932 for_body = NULL;
13933 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13934 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
13935 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13936 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
13938 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
13939 bool is_doacross = false;
13940 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
13941 find_standalone_omp_ordered, NULL))
13943 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
13944 is_doacross = true;
13945 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
13946 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
13947 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
13948 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
13950 error_at (OMP_CLAUSE_LOCATION (*pc),
13951 "%<linear%> clause may not be specified together "
13952 "with %<ordered%> clause if stand-alone %<ordered%> "
13953 "construct is nested in it");
13954 *pc = OMP_CLAUSE_CHAIN (*pc);
13956 else
13957 pc = &OMP_CLAUSE_CHAIN (*pc);
13959 int collapse = 1, tile = 0;
13960 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
13961 if (c)
13962 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
13963 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
13964 if (c)
13965 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
13966 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
13967 hash_set<tree> *allocate_uids = NULL;
13968 if (c)
13970 allocate_uids = new hash_set<tree>;
13971 for (; c; c = OMP_CLAUSE_CHAIN (c))
13972 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
13973 allocate_uids->add (OMP_CLAUSE_DECL (c));
13975 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13977 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13978 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13979 decl = TREE_OPERAND (t, 0);
13980 gcc_assert (DECL_P (decl));
13981 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
13982 || POINTER_TYPE_P (TREE_TYPE (decl)));
13983 if (is_doacross)
13985 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
13987 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13988 if (TREE_CODE (orig_decl) == TREE_LIST)
13990 orig_decl = TREE_PURPOSE (orig_decl);
13991 if (!orig_decl)
13992 orig_decl = decl;
13994 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
13996 else
13997 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13998 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
14001 if (for_stmt == orig_for_stmt)
14003 tree orig_decl = decl;
14004 if (OMP_FOR_ORIG_DECLS (for_stmt))
14006 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14007 if (TREE_CODE (orig_decl) == TREE_LIST)
14009 orig_decl = TREE_PURPOSE (orig_decl);
14010 if (!orig_decl)
14011 orig_decl = decl;
14014 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
14015 error_at (EXPR_LOCATION (for_stmt),
14016 "threadprivate iteration variable %qD", orig_decl);
14019 /* Make sure the iteration variable is private. */
14020 tree c = NULL_TREE;
14021 tree c2 = NULL_TREE;
14022 if (orig_for_stmt != for_stmt)
14024 /* Preserve this information until we gimplify the inner simd. */
14025 if (has_decl_expr
14026 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14027 TREE_PRIVATE (t) = 1;
14029 else if (ort == ORT_SIMD)
14031 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14032 (splay_tree_key) decl);
14033 omp_is_private (gimplify_omp_ctxp, decl,
14034 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14035 != 1));
14036 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14038 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14039 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14040 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14041 OMP_CLAUSE_LASTPRIVATE);
14042 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14043 OMP_CLAUSE_LASTPRIVATE))
14044 if (OMP_CLAUSE_DECL (c3) == decl)
14046 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14047 "conditional %<lastprivate%> on loop "
14048 "iterator %qD ignored", decl);
14049 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14050 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14053 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14055 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14056 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14057 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14058 if ((has_decl_expr
14059 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14060 || TREE_PRIVATE (t))
14062 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14063 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14065 struct gimplify_omp_ctx *outer
14066 = gimplify_omp_ctxp->outer_context;
14067 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14069 if (outer->region_type == ORT_WORKSHARE
14070 && outer->combined_loop)
14072 n = splay_tree_lookup (outer->variables,
14073 (splay_tree_key)decl);
14074 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14076 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14077 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14079 else
14081 struct gimplify_omp_ctx *octx = outer->outer_context;
14082 if (octx
14083 && octx->region_type == ORT_COMBINED_PARALLEL
14084 && octx->outer_context
14085 && (octx->outer_context->region_type
14086 == ORT_WORKSHARE)
14087 && octx->outer_context->combined_loop)
14089 octx = octx->outer_context;
14090 n = splay_tree_lookup (octx->variables,
14091 (splay_tree_key)decl);
14092 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14094 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14095 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14102 OMP_CLAUSE_DECL (c) = decl;
14103 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14104 OMP_FOR_CLAUSES (for_stmt) = c;
14105 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14106 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14107 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14108 true);
14110 else
14112 bool lastprivate
14113 = (!has_decl_expr
14114 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14115 if (TREE_PRIVATE (t))
14116 lastprivate = false;
14117 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14119 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14120 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14121 lastprivate = false;
14124 struct gimplify_omp_ctx *outer
14125 = gimplify_omp_ctxp->outer_context;
14126 if (outer && lastprivate)
14127 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14128 true);
14130 c = build_omp_clause (input_location,
14131 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14132 : OMP_CLAUSE_PRIVATE);
14133 OMP_CLAUSE_DECL (c) = decl;
14134 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14135 OMP_FOR_CLAUSES (for_stmt) = c;
14136 omp_add_variable (gimplify_omp_ctxp, decl,
14137 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14138 | GOVD_EXPLICIT | GOVD_SEEN);
14139 c = NULL_TREE;
14142 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14144 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14145 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14146 (splay_tree_key) decl);
14147 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14148 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14149 OMP_CLAUSE_LASTPRIVATE);
14150 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14151 OMP_CLAUSE_LASTPRIVATE))
14152 if (OMP_CLAUSE_DECL (c3) == decl)
14154 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14155 "conditional %<lastprivate%> on loop "
14156 "iterator %qD ignored", decl);
14157 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14158 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14161 else
14162 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14164 /* If DECL is not a gimple register, create a temporary variable to act
14165 as an iteration counter. This is valid, since DECL cannot be
14166 modified in the body of the loop. Similarly for any iteration vars
14167 in simd with collapse > 1 where the iterator vars must be
14168 lastprivate. And similarly for vars mentioned in allocate clauses. */
14169 if (orig_for_stmt != for_stmt)
14170 var = decl;
14171 else if (!is_gimple_reg (decl)
14172 || (ort == ORT_SIMD
14173 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14174 || (allocate_uids && allocate_uids->contains (decl)))
14176 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14177 /* Make sure omp_add_variable is not called on it prematurely.
14178 We call it ourselves a few lines later. */
14179 gimplify_omp_ctxp = NULL;
14180 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14181 gimplify_omp_ctxp = ctx;
14182 TREE_OPERAND (t, 0) = var;
14184 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14186 if (ort == ORT_SIMD
14187 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14189 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14190 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14191 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14192 OMP_CLAUSE_DECL (c2) = var;
14193 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14194 OMP_FOR_CLAUSES (for_stmt) = c2;
14195 omp_add_variable (gimplify_omp_ctxp, var,
14196 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14197 if (c == NULL_TREE)
14199 c = c2;
14200 c2 = NULL_TREE;
14203 else
14204 omp_add_variable (gimplify_omp_ctxp, var,
14205 GOVD_PRIVATE | GOVD_SEEN);
14207 else
14208 var = decl;
14210 gimplify_omp_ctxp->in_for_exprs = true;
14211 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14213 tree lb = TREE_OPERAND (t, 1);
14214 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14215 is_gimple_val, fb_rvalue, false);
14216 ret = MIN (ret, tret);
14217 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14218 is_gimple_val, fb_rvalue, false);
14220 else
14221 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14222 is_gimple_val, fb_rvalue, false);
14223 gimplify_omp_ctxp->in_for_exprs = false;
14224 ret = MIN (ret, tret);
14225 if (ret == GS_ERROR)
14226 return ret;
14228 /* Handle OMP_FOR_COND. */
14229 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14230 gcc_assert (COMPARISON_CLASS_P (t));
14231 gcc_assert (TREE_OPERAND (t, 0) == decl);
14233 gimplify_omp_ctxp->in_for_exprs = true;
14234 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14236 tree ub = TREE_OPERAND (t, 1);
14237 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14238 is_gimple_val, fb_rvalue, false);
14239 ret = MIN (ret, tret);
14240 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14241 is_gimple_val, fb_rvalue, false);
14243 else
14244 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14245 is_gimple_val, fb_rvalue, false);
14246 gimplify_omp_ctxp->in_for_exprs = false;
14247 ret = MIN (ret, tret);
14249 /* Handle OMP_FOR_INCR. */
14250 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14251 switch (TREE_CODE (t))
14253 case PREINCREMENT_EXPR:
14254 case POSTINCREMENT_EXPR:
14256 tree decl = TREE_OPERAND (t, 0);
14257 /* c_omp_for_incr_canonicalize_ptr() should have been
14258 called to massage things appropriately. */
14259 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14261 if (orig_for_stmt != for_stmt)
14262 break;
14263 t = build_int_cst (TREE_TYPE (decl), 1);
14264 if (c)
14265 OMP_CLAUSE_LINEAR_STEP (c) = t;
14266 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14267 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14268 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14269 break;
14272 case PREDECREMENT_EXPR:
14273 case POSTDECREMENT_EXPR:
14274 /* c_omp_for_incr_canonicalize_ptr() should have been
14275 called to massage things appropriately. */
14276 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14277 if (orig_for_stmt != for_stmt)
14278 break;
14279 t = build_int_cst (TREE_TYPE (decl), -1);
14280 if (c)
14281 OMP_CLAUSE_LINEAR_STEP (c) = t;
14282 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14283 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14284 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14285 break;
14287 case MODIFY_EXPR:
14288 gcc_assert (TREE_OPERAND (t, 0) == decl);
14289 TREE_OPERAND (t, 0) = var;
14291 t = TREE_OPERAND (t, 1);
14292 switch (TREE_CODE (t))
14294 case PLUS_EXPR:
14295 if (TREE_OPERAND (t, 1) == decl)
14297 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14298 TREE_OPERAND (t, 0) = var;
14299 break;
14302 /* Fallthru. */
14303 case MINUS_EXPR:
14304 case POINTER_PLUS_EXPR:
14305 gcc_assert (TREE_OPERAND (t, 0) == decl);
14306 TREE_OPERAND (t, 0) = var;
14307 break;
14308 default:
14309 gcc_unreachable ();
14312 gimplify_omp_ctxp->in_for_exprs = true;
14313 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14314 is_gimple_val, fb_rvalue, false);
14315 ret = MIN (ret, tret);
14316 if (c)
14318 tree step = TREE_OPERAND (t, 1);
14319 tree stept = TREE_TYPE (decl);
14320 if (POINTER_TYPE_P (stept))
14321 stept = sizetype;
14322 step = fold_convert (stept, step);
14323 if (TREE_CODE (t) == MINUS_EXPR)
14324 step = fold_build1 (NEGATE_EXPR, stept, step);
14325 OMP_CLAUSE_LINEAR_STEP (c) = step;
14326 if (step != TREE_OPERAND (t, 1))
14328 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14329 &for_pre_body, NULL,
14330 is_gimple_val, fb_rvalue, false);
14331 ret = MIN (ret, tret);
14334 gimplify_omp_ctxp->in_for_exprs = false;
14335 break;
14337 default:
14338 gcc_unreachable ();
14341 if (c2)
14343 gcc_assert (c);
14344 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14347 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14349 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14350 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14351 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14352 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14353 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14354 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14355 && OMP_CLAUSE_DECL (c) == decl)
14357 if (is_doacross && (collapse == 1 || i >= collapse))
14358 t = var;
14359 else
14361 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14362 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14363 gcc_assert (TREE_OPERAND (t, 0) == var);
14364 t = TREE_OPERAND (t, 1);
14365 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14366 || TREE_CODE (t) == MINUS_EXPR
14367 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14368 gcc_assert (TREE_OPERAND (t, 0) == var);
14369 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14370 is_doacross ? var : decl,
14371 TREE_OPERAND (t, 1));
14373 gimple_seq *seq;
14374 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14375 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14376 else
14377 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14378 push_gimplify_context ();
14379 gimplify_assign (decl, t, seq);
14380 gimple *bind = NULL;
14381 if (gimplify_ctxp->temps)
14383 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14384 *seq = NULL;
14385 gimplify_seq_add_stmt (seq, bind);
14387 pop_gimplify_context (bind);
14390 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14391 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14393 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14394 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14395 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14396 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14397 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14398 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14399 gcc_assert (COMPARISON_CLASS_P (t));
14400 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14401 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14402 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14406 BITMAP_FREE (has_decl_expr);
14407 delete allocate_uids;
14409 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14410 || (loop_p && orig_for_stmt == for_stmt))
14412 push_gimplify_context ();
14413 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14415 OMP_FOR_BODY (orig_for_stmt)
14416 = build3 (BIND_EXPR, void_type_node, NULL,
14417 OMP_FOR_BODY (orig_for_stmt), NULL);
14418 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14422 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14423 &for_body);
14425 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14426 || (loop_p && orig_for_stmt == for_stmt))
14428 if (gimple_code (g) == GIMPLE_BIND)
14429 pop_gimplify_context (g);
14430 else
14431 pop_gimplify_context (NULL);
14434 if (orig_for_stmt != for_stmt)
14435 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14437 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14438 decl = TREE_OPERAND (t, 0);
14439 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14440 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14441 gimplify_omp_ctxp = ctx->outer_context;
14442 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14443 gimplify_omp_ctxp = ctx;
14444 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14445 TREE_OPERAND (t, 0) = var;
14446 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14447 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14448 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14449 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14450 for (int j = i + 1;
14451 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14453 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14454 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14455 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14456 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14458 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14459 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14461 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14462 gcc_assert (COMPARISON_CLASS_P (t));
14463 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14464 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14466 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14467 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14472 gimplify_adjust_omp_clauses (pre_p, for_body,
14473 &OMP_FOR_CLAUSES (orig_for_stmt),
14474 TREE_CODE (orig_for_stmt));
14476 int kind;
14477 switch (TREE_CODE (orig_for_stmt))
14479 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14480 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14481 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14482 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14483 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14484 default:
14485 gcc_unreachable ();
14487 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14489 gimplify_seq_add_seq (pre_p, for_pre_body);
14490 for_pre_body = NULL;
14492 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14493 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14494 for_pre_body);
14495 if (orig_for_stmt != for_stmt)
14496 gimple_omp_for_set_combined_p (gfor, true);
14497 if (gimplify_omp_ctxp
14498 && (gimplify_omp_ctxp->combined_loop
14499 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14500 && gimplify_omp_ctxp->outer_context
14501 && gimplify_omp_ctxp->outer_context->combined_loop)))
14503 gimple_omp_for_set_combined_into_p (gfor, true);
14504 if (gimplify_omp_ctxp->combined_loop)
14505 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14506 else
14507 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14510 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14512 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14513 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14514 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14515 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14516 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14517 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14518 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14519 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14522 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14523 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
14524 The outer taskloop is in charge of computing the number of iterations
14525 and the counts for collapsed loops, and of holding the taskloop
14526 specific clauses. The task construct stands for the effect of data
14527 sharing on the explicit task it creates, and the inner taskloop
14528 stands for the expansion of the static loop inside that task. */
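/* Schematically (illustrative only):
     #pragma omp taskloop grainsize (g) firstprivate (x)
     for (...) body;
   ends up as
     GIMPLE_OMP_FOR (taskloop; grainsize (g), private IV)
       GIMPLE_OMP_TASK (firstprivate (x), taskloop_p)
         GIMPLE_OMP_FOR (taskloop; lastprivate etc.)
           body
   with the loop below deciding which construct each clause lands on. */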
14529 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14531 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14532 tree task_clauses = NULL_TREE;
14533 tree c = *gfor_clauses_ptr;
14534 tree *gtask_clauses_ptr = &task_clauses;
14535 tree outer_for_clauses = NULL_TREE;
14536 tree *gforo_clauses_ptr = &outer_for_clauses;
14537 bitmap lastprivate_uids = NULL;
14538 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14540 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14541 if (c)
14543 lastprivate_uids = BITMAP_ALLOC (NULL);
14544 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14545 OMP_CLAUSE_LASTPRIVATE))
14546 bitmap_set_bit (lastprivate_uids,
14547 DECL_UID (OMP_CLAUSE_DECL (c)));
14549 c = *gfor_clauses_ptr;
14551 for (; c; c = OMP_CLAUSE_CHAIN (c))
14552 switch (OMP_CLAUSE_CODE (c))
14554 /* These clauses are allowed on the task; move them there. */
14555 case OMP_CLAUSE_SHARED:
14556 case OMP_CLAUSE_FIRSTPRIVATE:
14557 case OMP_CLAUSE_DEFAULT:
14558 case OMP_CLAUSE_IF:
14559 case OMP_CLAUSE_UNTIED:
14560 case OMP_CLAUSE_FINAL:
14561 case OMP_CLAUSE_MERGEABLE:
14562 case OMP_CLAUSE_PRIORITY:
14563 case OMP_CLAUSE_REDUCTION:
14564 case OMP_CLAUSE_IN_REDUCTION:
14565 *gtask_clauses_ptr = c;
14566 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14567 break;
14568 case OMP_CLAUSE_PRIVATE:
14569 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14571 /* We want private on outer for and firstprivate
14572 on task. */
14573 *gtask_clauses_ptr
14574 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14575 OMP_CLAUSE_FIRSTPRIVATE);
14576 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14577 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14578 openacc);
14579 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14580 *gforo_clauses_ptr = c;
14581 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14583 else
14585 *gtask_clauses_ptr = c;
14586 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14588 break;
14589 /* These clauses go into outer taskloop clauses. */
14590 case OMP_CLAUSE_GRAINSIZE:
14591 case OMP_CLAUSE_NUM_TASKS:
14592 case OMP_CLAUSE_NOGROUP:
14593 *gforo_clauses_ptr = c;
14594 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14595 break;
14596 /* The collapse clause is duplicated on both taskloops. */
14597 case OMP_CLAUSE_COLLAPSE:
14598 *gfor_clauses_ptr = c;
14599 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14600 *gforo_clauses_ptr = copy_node (c);
14601 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14602 break;
14603 /* For lastprivate, keep the clause on the inner taskloop, and add
14604 a shared clause on the task. If the same decl is also firstprivate,
14605 also add a firstprivate clause on the inner taskloop. */
14606 case OMP_CLAUSE_LASTPRIVATE:
14607 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14609 /* For taskloop C++ lastprivate IVs, we want:
14610 1) private on outer taskloop
14611 2) firstprivate and shared on task
14612 3) lastprivate on inner taskloop */
14613 *gtask_clauses_ptr
14614 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14615 OMP_CLAUSE_FIRSTPRIVATE);
14616 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14617 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14618 openacc);
14619 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14620 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14621 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14622 OMP_CLAUSE_PRIVATE);
14623 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14624 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14625 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14626 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14628 *gfor_clauses_ptr = c;
14629 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14630 *gtask_clauses_ptr
14631 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14632 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14633 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14634 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14635 gtask_clauses_ptr
14636 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14637 break;
14638 /* The allocate clause is duplicated on the task and inner taskloop
14639 if the decl is lastprivate; otherwise it is just put on the task. */
14640 case OMP_CLAUSE_ALLOCATE:
14641 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14642 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14644 /* Additionally, put a firstprivate clause on the task
14645 for the allocator if it is not constant. */
14646 *gtask_clauses_ptr
14647 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14648 OMP_CLAUSE_FIRSTPRIVATE);
14649 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14650 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14651 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14653 if (lastprivate_uids
14654 && bitmap_bit_p (lastprivate_uids,
14655 DECL_UID (OMP_CLAUSE_DECL (c))))
14657 *gfor_clauses_ptr = c;
14658 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14659 *gtask_clauses_ptr = copy_node (c);
14660 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14662 else
14664 *gtask_clauses_ptr = c;
14665 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14667 break;
14668 default:
14669 gcc_unreachable ();
14671 *gfor_clauses_ptr = NULL_TREE;
14672 *gtask_clauses_ptr = NULL_TREE;
14673 *gforo_clauses_ptr = NULL_TREE;
14674 BITMAP_FREE (lastprivate_uids);
14675 gimple_set_location (gfor, input_location);
14676 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14677 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14678 NULL_TREE, NULL_TREE, NULL_TREE);
14679 gimple_set_location (g, input_location);
14680 gimple_omp_task_set_taskloop_p (g, true);
14681 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14682 gomp_for *gforo
14683 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14684 gimple_omp_for_collapse (gfor),
14685 gimple_omp_for_pre_body (gfor));
14686 gimple_omp_for_set_pre_body (gfor, NULL);
14687 gimple_omp_for_set_combined_p (gforo, true);
14688 gimple_omp_for_set_combined_into_p (gfor, true);
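/* Give the outer taskloop its own iteration variables: for each
   collapsed dimension the loop below creates a temporary, uses it as
   the outer loop's index (rewriting the increment to match) and adds
   a private clause for it, so the original indices stay confined to
   the inner taskloop. */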
14689 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14691 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14692 tree v = create_tmp_var (type);
14693 gimple_omp_for_set_index (gforo, i, v);
14694 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14695 gimple_omp_for_set_initial (gforo, i, t);
14696 gimple_omp_for_set_cond (gforo, i,
14697 gimple_omp_for_cond (gfor, i));
14698 t = unshare_expr (gimple_omp_for_final (gfor, i));
14699 gimple_omp_for_set_final (gforo, i, t);
14700 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14701 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14702 TREE_OPERAND (t, 0) = v;
14703 gimple_omp_for_set_incr (gforo, i, t);
14704 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14705 OMP_CLAUSE_DECL (t) = v;
14706 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14707 gimple_omp_for_set_clauses (gforo, t);
14708 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14710 tree *p1 = NULL, *p2 = NULL;
14711 t = gimple_omp_for_initial (gforo, i);
14712 if (TREE_CODE (t) == TREE_VEC)
14713 p1 = &TREE_VEC_ELT (t, 0);
14714 t = gimple_omp_for_final (gforo, i);
14715 if (TREE_CODE (t) == TREE_VEC)
14717 if (p1)
14718 p2 = &TREE_VEC_ELT (t, 0);
14719 else
14720 p1 = &TREE_VEC_ELT (t, 0);
14722 if (p1)
14724 int j;
14725 for (j = 0; j < i; j++)
14726 if (*p1 == gimple_omp_for_index (gfor, j))
14728 *p1 = gimple_omp_for_index (gforo, j);
14729 if (p2)
14730 *p2 = *p1;
14731 break;
14733 gcc_assert (j < i);
14737 gimplify_seq_add_stmt (pre_p, gforo);
14739 else
14740 gimplify_seq_add_stmt (pre_p, gfor);
14742 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14744 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14745 unsigned lastprivate_conditional = 0;
14746 while (ctx
14747 && (ctx->region_type == ORT_TARGET_DATA
14748 || ctx->region_type == ORT_TASKGROUP))
14749 ctx = ctx->outer_context;
14750 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14751 for (tree c = gimple_omp_for_clauses (gfor);
14752 c; c = OMP_CLAUSE_CHAIN (c))
14753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14754 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14755 ++lastprivate_conditional;
14756 if (lastprivate_conditional)
14758 struct omp_for_data fd;
14759 omp_extract_for_data (gfor, &fd, NULL);
14760 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14761 lastprivate_conditional);
14762 tree var = create_tmp_var_raw (type);
14763 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14764 OMP_CLAUSE_DECL (c) = var;
14765 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14766 gimple_omp_for_set_clauses (gfor, c);
14767 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14770 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14772 unsigned lastprivate_conditional = 0;
14773 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14774 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14775 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14776 ++lastprivate_conditional;
14777 if (lastprivate_conditional)
14779 struct omp_for_data fd;
14780 omp_extract_for_data (gfor, &fd, NULL);
14781 tree type = unsigned_type_for (fd.iter_type);
14782 while (lastprivate_conditional--)
14784 tree c = build_omp_clause (UNKNOWN_LOCATION,
14785 OMP_CLAUSE__CONDTEMP_);
14786 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14787 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14788 gimple_omp_for_set_clauses (gfor, c);
14793 if (ret != GS_ALL_DONE)
14794 return GS_ERROR;
14795 *expr_p = NULL_TREE;
14796 return GS_ALL_DONE;
14799 /* Helper for gimplify_omp_loop, called through walk_tree. */
14801 static tree
14802 note_no_context_vars (tree *tp, int *, void *data)
14804 if (VAR_P (*tp)
14805 && DECL_CONTEXT (*tp) == NULL_TREE
14806 && !is_global_var (*tp))
14808 vec<tree> *d = (vec<tree> *) data;
14809 d->safe_push (*tp);
14810 DECL_CONTEXT (*tp) = current_function_decl;
14812 return NULL_TREE;
14815 /* Gimplify the gross structure of an OMP_LOOP statement. */
14817 static enum gimplify_status
14818 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14820 tree for_stmt = *expr_p;
14821 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14822 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14823 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14824 int i;
14826 /* If order is not present, the behavior is as if order(concurrent)
14827 appeared. */
14828 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14829 if (order == NULL_TREE)
14831 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14832 OMP_CLAUSE_CHAIN (order) = clauses;
14833 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14836 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14837 if (bind == NULL_TREE)
14839 if (!flag_openmp) /* flag_openmp_simd */
14841 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14842 kind = OMP_CLAUSE_BIND_TEAMS;
14843 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14844 kind = OMP_CLAUSE_BIND_PARALLEL;
14845 else
14847 for (; octx; octx = octx->outer_context)
14849 if ((octx->region_type & ORT_ACC) != 0
14850 || octx->region_type == ORT_NONE
14851 || octx->region_type == ORT_IMPLICIT_TARGET)
14852 continue;
14853 break;
14855 if (octx == NULL && !in_omp_construct)
14856 error_at (EXPR_LOCATION (for_stmt),
14857 "%<bind%> clause not specified on a %<loop%> "
14858 "construct not nested inside another OpenMP construct");
14860 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
14861 OMP_CLAUSE_CHAIN (bind) = clauses;
14862 OMP_CLAUSE_BIND_KIND (bind) = kind;
14863 OMP_FOR_CLAUSES (for_stmt) = bind;
14865 else
14866 switch (OMP_CLAUSE_BIND_KIND (bind))
14868 case OMP_CLAUSE_BIND_THREAD:
14869 break;
14870 case OMP_CLAUSE_BIND_PARALLEL:
14871 if (!flag_openmp) /* flag_openmp_simd */
14873 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14874 break;
14876 for (; octx; octx = octx->outer_context)
14877 if (octx->region_type == ORT_SIMD
14878 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
14880 error_at (EXPR_LOCATION (for_stmt),
14881 "%<bind(parallel)%> on a %<loop%> construct nested "
14882 "inside %<simd%> construct");
14883 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14884 break;
14886 kind = OMP_CLAUSE_BIND_PARALLEL;
14887 break;
14888 case OMP_CLAUSE_BIND_TEAMS:
14889 if (!flag_openmp) /* flag_openmp_simd */
14891 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14892 break;
14894 if ((octx
14895 && octx->region_type != ORT_IMPLICIT_TARGET
14896 && octx->region_type != ORT_NONE
14897 && (octx->region_type & ORT_TEAMS) == 0)
14898 || in_omp_construct)
14900 error_at (EXPR_LOCATION (for_stmt),
14901 "%<bind(teams)%> on a %<loop%> region not strictly "
14902 "nested inside of a %<teams%> region");
14903 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14904 break;
14906 kind = OMP_CLAUSE_BIND_TEAMS;
14907 break;
14908 default:
14909 gcc_unreachable ();
14912 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14913 switch (OMP_CLAUSE_CODE (*pc))
14915 case OMP_CLAUSE_REDUCTION:
14916 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
14918 error_at (OMP_CLAUSE_LOCATION (*pc),
14919 "%<inscan%> %<reduction%> clause on "
14920 "%qs construct", "loop");
14921 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
14923 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
14925 error_at (OMP_CLAUSE_LOCATION (*pc),
14926 "invalid %<task%> reduction modifier on construct "
14927 "other than %<parallel%>, %qs or %<sections%>",
14928 lang_GNU_Fortran () ? "do" : "for");
14929 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
14931 pc = &OMP_CLAUSE_CHAIN (*pc);
14932 break;
14933 case OMP_CLAUSE_LASTPRIVATE:
14934 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14936 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14937 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14938 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
14939 break;
14940 if (OMP_FOR_ORIG_DECLS (for_stmt)
14941 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14942 i)) == TREE_LIST
14943 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14944 i)))
14946 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14947 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
14948 break;
14951 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
14953 error_at (OMP_CLAUSE_LOCATION (*pc),
14954 "%<lastprivate%> clause on a %<loop%> construct refers "
14955 "to a variable %qD which is not the loop iterator",
14956 OMP_CLAUSE_DECL (*pc));
14957 *pc = OMP_CLAUSE_CHAIN (*pc);
14958 break;
14960 pc = &OMP_CLAUSE_CHAIN (*pc);
14961 break;
14962 default:
14963 pc = &OMP_CLAUSE_CHAIN (*pc);
14964 break;
14967 TREE_SET_CODE (for_stmt, OMP_SIMD);
14969 int last;
14970 switch (kind)
14972 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
14973 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
14974 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
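/* A sketch of the lowering performed below (illustrative): the loop
   construct has just been rewritten to simd, and depending on the
   effective binding it is further wrapped, roughly:
     bind(thread)   -> simd
     bind(parallel) -> for simd
     bind(teams)    -> distribute parallel for simd
   where the teams case relies on the enclosing teams region. */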
14976 for (int pass = 1; pass <= last; pass++)
14978 if (pass == 2)
14980 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
14981 make_node (BLOCK));
14982 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
14983 *expr_p = make_node (OMP_PARALLEL);
14984 TREE_TYPE (*expr_p) = void_type_node;
14985 OMP_PARALLEL_BODY (*expr_p) = bind;
14986 OMP_PARALLEL_COMBINED (*expr_p) = 1;
14987 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
14988 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
14989 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14990 if (OMP_FOR_ORIG_DECLS (for_stmt)
14991 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
14992 == TREE_LIST))
14994 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14995 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
14997 *pc = build_omp_clause (UNKNOWN_LOCATION,
14998 OMP_CLAUSE_FIRSTPRIVATE);
14999 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
15000 pc = &OMP_CLAUSE_CHAIN (*pc);
15004 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
15005 tree *pc = &OMP_FOR_CLAUSES (t);
15006 TREE_TYPE (t) = void_type_node;
15007 OMP_FOR_BODY (t) = *expr_p;
15008 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
15009 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
15010 switch (OMP_CLAUSE_CODE (c))
15012 case OMP_CLAUSE_BIND:
15013 case OMP_CLAUSE_ORDER:
15014 case OMP_CLAUSE_COLLAPSE:
15015 *pc = copy_node (c);
15016 pc = &OMP_CLAUSE_CHAIN (*pc);
15017 break;
15018 case OMP_CLAUSE_PRIVATE:
15019 case OMP_CLAUSE_FIRSTPRIVATE:
15020 /* Only needed on innermost. */
15021 break;
15022 case OMP_CLAUSE_LASTPRIVATE:
15023 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15025 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15026 OMP_CLAUSE_FIRSTPRIVATE);
15027 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15028 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15029 pc = &OMP_CLAUSE_CHAIN (*pc);
15031 *pc = copy_node (c);
15032 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15033 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15034 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15036 if (pass != last)
15037 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15038 else
15039 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15040 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15042 pc = &OMP_CLAUSE_CHAIN (*pc);
15043 break;
15044 case OMP_CLAUSE_REDUCTION:
15045 *pc = copy_node (c);
15046 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15047 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15048 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15050 auto_vec<tree> no_context_vars;
15051 int walk_subtrees = 0;
15052 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15053 &walk_subtrees, &no_context_vars);
15054 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15055 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15056 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15057 note_no_context_vars,
15058 &no_context_vars);
15059 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15060 note_no_context_vars,
15061 &no_context_vars);
15063 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15064 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15065 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15066 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15067 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15069 hash_map<tree, tree> decl_map;
15070 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15071 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15072 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15073 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15074 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15075 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15077 copy_body_data id;
15078 memset (&id, 0, sizeof (id));
15079 id.src_fn = current_function_decl;
15080 id.dst_fn = current_function_decl;
15081 id.src_cfun = cfun;
15082 id.decl_map = &decl_map;
15083 id.copy_decl = copy_decl_no_change;
15084 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15085 id.transform_new_cfg = true;
15086 id.transform_return_to_modify = false;
15087 id.eh_lp_nr = 0;
15088 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15089 &id, NULL);
15090 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15091 &id, NULL);
15093 for (tree d : no_context_vars)
15095 DECL_CONTEXT (d) = NULL_TREE;
15096 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15099 else
15101 OMP_CLAUSE_REDUCTION_INIT (*pc)
15102 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15103 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15104 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15106 pc = &OMP_CLAUSE_CHAIN (*pc);
15107 break;
15108 default:
15109 gcc_unreachable ();
15111 *pc = NULL_TREE;
15112 *expr_p = t;
15114 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
15118 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15119 of OMP_TARGET's body. */
15121 static tree
15122 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15124 *walk_subtrees = 0;
15125 switch (TREE_CODE (*tp))
15127 case OMP_TEAMS:
15128 return *tp;
15129 case BIND_EXPR:
15130 case STATEMENT_LIST:
15131 *walk_subtrees = 1;
15132 break;
15133 default:
15134 break;
15136 return NULL_TREE;
15139 /* Helper function of optimize_target_teams, determine if the expression
15140 can be computed safely before the target construct on the host. */
15142 static tree
15143 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15145 splay_tree_node n;
15147 if (TYPE_P (*tp))
15149 *walk_subtrees = 0;
15150 return NULL_TREE;
15152 switch (TREE_CODE (*tp))
15154 case VAR_DECL:
15155 case PARM_DECL:
15156 case RESULT_DECL:
15157 *walk_subtrees = 0;
15158 if (error_operand_p (*tp)
15159 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15160 || DECL_HAS_VALUE_EXPR_P (*tp)
15161 || DECL_THREAD_LOCAL_P (*tp)
15162 || TREE_SIDE_EFFECTS (*tp)
15163 || TREE_THIS_VOLATILE (*tp))
15164 return *tp;
15165 if (is_global_var (*tp)
15166 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15167 || lookup_attribute ("omp declare target link",
15168 DECL_ATTRIBUTES (*tp))))
15169 return *tp;
15170 if (VAR_P (*tp)
15171 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15172 && !is_global_var (*tp)
15173 && decl_function_context (*tp) == current_function_decl)
15174 return *tp;
15175 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15176 (splay_tree_key) *tp);
15177 if (n == NULL)
15179 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15180 return NULL_TREE;
15181 return *tp;
15183 else if (n->value & GOVD_LOCAL)
15184 return *tp;
15185 else if (n->value & GOVD_FIRSTPRIVATE)
15186 return NULL_TREE;
15187 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15188 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15189 return NULL_TREE;
15190 return *tp;
15191 case INTEGER_CST:
15192 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15193 return *tp;
15194 return NULL_TREE;
15195 case TARGET_EXPR:
15196 if (TARGET_EXPR_INITIAL (*tp)
15197 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15198 return *tp;
15199 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15200 walk_subtrees, NULL);
15201 /* Allow some reasonable subset of integral arithmetic. */
15202 case PLUS_EXPR:
15203 case MINUS_EXPR:
15204 case MULT_EXPR:
15205 case TRUNC_DIV_EXPR:
15206 case CEIL_DIV_EXPR:
15207 case FLOOR_DIV_EXPR:
15208 case ROUND_DIV_EXPR:
15209 case TRUNC_MOD_EXPR:
15210 case CEIL_MOD_EXPR:
15211 case FLOOR_MOD_EXPR:
15212 case ROUND_MOD_EXPR:
15213 case RDIV_EXPR:
15214 case EXACT_DIV_EXPR:
15215 case MIN_EXPR:
15216 case MAX_EXPR:
15217 case LSHIFT_EXPR:
15218 case RSHIFT_EXPR:
15219 case BIT_IOR_EXPR:
15220 case BIT_XOR_EXPR:
15221 case BIT_AND_EXPR:
15222 case NEGATE_EXPR:
15223 case ABS_EXPR:
15224 case BIT_NOT_EXPR:
15225 case NON_LVALUE_EXPR:
15226 CASE_CONVERT:
15227 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15228 return *tp;
15229 return NULL_TREE;
15230 /* And disallow anything else, except for comparisons. */
15231 default:
15232 if (COMPARISON_CLASS_P (*tp))
15233 return NULL_TREE;
15234 return *tp;
15238 /* Try to determine whether the num_teams and/or thread_limit
15239 expressions can have their values determined already before
15240 entering the target construct.
15241 INTEGER_CSTs trivially can; so can integral decls that are
15242 firstprivate (explicitly or implicitly) or explicitly mapped with
15243 map(always, to:) or map(always, tofrom:) on the target region,
15244 and expressions involving simple arithmetic on those. Function
15245 calls are not OK, nor is dereferencing something, etc.
15246 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15247 EXPR based on what we find:
15248 0 stands for a clause not specified at all; use the implementation
15249 default.
15250 -1 stands for a value that can't be determined easily before
15251 entering the target construct.
15252 -2 means that no explicit teams construct was specified.
15253 If no teams construct is present at all, use 1 for num_teams and 0
15254 for thread_limit (only one team is involved, and the thread limit is implementation defined). */
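/* For instance (a sketch): given
     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(n)
   where n is firstprivate (explicitly or implicitly) on the target,
   this adds num_teams(4) and a thread_limit clause with n evaluated
   before the construct to the target's clauses; an expression that
   cannot be precomputed yields -1 instead. */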
15256 static void
15257 optimize_target_teams (tree target, gimple_seq *pre_p)
15259 tree body = OMP_BODY (target);
15260 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15261 tree num_teams_lower = NULL_TREE;
15262 tree num_teams_upper = integer_zero_node;
15263 tree thread_limit = integer_zero_node;
15264 location_t num_teams_loc = EXPR_LOCATION (target);
15265 location_t thread_limit_loc = EXPR_LOCATION (target);
15266 tree c, *p, expr;
15267 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15269 if (teams == NULL_TREE)
15270 num_teams_upper = build_int_cst (integer_type_node, -2);
15271 else
15272 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15274 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15276 p = &num_teams_upper;
15277 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15278 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15280 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15281 if (TREE_CODE (expr) == INTEGER_CST)
15282 num_teams_lower = expr;
15283 else if (walk_tree (&expr, computable_teams_clause,
15284 NULL, NULL))
15285 num_teams_lower = integer_minus_one_node;
15286 else
15288 num_teams_lower = expr;
15289 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15290 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15291 is_gimple_val, fb_rvalue, false)
15292 == GS_ERROR)
15294 gimplify_omp_ctxp = target_ctx;
15295 num_teams_lower = integer_minus_one_node;
15297 else
15299 gimplify_omp_ctxp = target_ctx;
15300 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15301 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15302 = num_teams_lower;
15307 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15309 p = &thread_limit;
15310 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15312 else
15313 continue;
15314 expr = OMP_CLAUSE_OPERAND (c, 0);
15315 if (TREE_CODE (expr) == INTEGER_CST)
15317 *p = expr;
15318 continue;
15320 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15322 *p = integer_minus_one_node;
15323 continue;
15325 *p = expr;
15326 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15327 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15328 == GS_ERROR)
15330 gimplify_omp_ctxp = target_ctx;
15331 *p = integer_minus_one_node;
15332 continue;
15334 gimplify_omp_ctxp = target_ctx;
15335 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15336 OMP_CLAUSE_OPERAND (c, 0) = *p;
15338 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15340 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15341 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15342 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15343 OMP_TARGET_CLAUSES (target) = c;
15345 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15346 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15347 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15348 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15349 OMP_TARGET_CLAUSES (target) = c;
15352 /* Gimplify the gross structure of several OMP constructs. */
15354 static void
15355 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15357 tree expr = *expr_p;
15358 gimple *stmt;
15359 gimple_seq body = NULL;
15360 enum omp_region_type ort;
15362 switch (TREE_CODE (expr))
15364 case OMP_SECTIONS:
15365 case OMP_SINGLE:
15366 ort = ORT_WORKSHARE;
15367 break;
15368 case OMP_SCOPE:
15369 ort = ORT_TASKGROUP;
15370 break;
15371 case OMP_TARGET:
15372 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15373 break;
15374 case OACC_KERNELS:
15375 ort = ORT_ACC_KERNELS;
15376 break;
15377 case OACC_PARALLEL:
15378 ort = ORT_ACC_PARALLEL;
15379 break;
15380 case OACC_SERIAL:
15381 ort = ORT_ACC_SERIAL;
15382 break;
15383 case OACC_DATA:
15384 ort = ORT_ACC_DATA;
15385 break;
15386 case OMP_TARGET_DATA:
15387 ort = ORT_TARGET_DATA;
15388 break;
15389 case OMP_TEAMS:
15390 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15391 if (gimplify_omp_ctxp == NULL
15392 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15393 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15394 break;
15395 case OACC_HOST_DATA:
15396 ort = ORT_ACC_HOST_DATA;
15397 break;
15398 default:
15399 gcc_unreachable ();
15402 bool save_in_omp_construct = in_omp_construct;
15403 if ((ort & ORT_ACC) == 0)
15404 in_omp_construct = false;
15405 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15406 TREE_CODE (expr));
15407 if (TREE_CODE (expr) == OMP_TARGET)
15408 optimize_target_teams (expr, pre_p);
15409 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15410 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15412 push_gimplify_context ();
15413 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15414 if (gimple_code (g) == GIMPLE_BIND)
15415 pop_gimplify_context (g);
15416 else
15417 pop_gimplify_context (NULL);
15418 if ((ort & ORT_TARGET_DATA) != 0)
15420 enum built_in_function end_ix;
15421 switch (TREE_CODE (expr))
15423 case OACC_DATA:
15424 case OACC_HOST_DATA:
15425 end_ix = BUILT_IN_GOACC_DATA_END;
15426 break;
15427 case OMP_TARGET_DATA:
15428 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15429 break;
15430 default:
15431 gcc_unreachable ();
15433 tree fn = builtin_decl_explicit (end_ix);
15434 g = gimple_build_call (fn, 0);
15435 gimple_seq cleanup = NULL;
15436 gimple_seq_add_stmt (&cleanup, g);
15437 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15438 body = NULL;
15439 gimple_seq_add_stmt (&body, g);
15442 else
15443 gimplify_and_add (OMP_BODY (expr), &body);
15444 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15445 TREE_CODE (expr));
15446 in_omp_construct = save_in_omp_construct;
15448 switch (TREE_CODE (expr))
15450 case OACC_DATA:
15451 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15452 OMP_CLAUSES (expr));
15453 break;
15454 case OACC_HOST_DATA:
15455 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15457 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15458 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15459 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15462 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15463 OMP_CLAUSES (expr));
15464 break;
15465 case OACC_KERNELS:
15466 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15467 OMP_CLAUSES (expr));
15468 break;
15469 case OACC_PARALLEL:
15470 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15471 OMP_CLAUSES (expr));
15472 break;
15473 case OACC_SERIAL:
15474 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15475 OMP_CLAUSES (expr));
15476 break;
15477 case OMP_SECTIONS:
15478 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15479 break;
15480 case OMP_SINGLE:
15481 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15482 break;
15483 case OMP_SCOPE:
15484 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15485 break;
15486 case OMP_TARGET:
15487 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15488 OMP_CLAUSES (expr));
15489 break;
15490 case OMP_TARGET_DATA:
15491 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15492 to be evaluated before the use_device_{ptr,addr} clauses if they
15493 refer to the same variables. */
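/* E.g. (a sketch) for
     #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)
   the map of p[0:n] must be processed first so that use_device_ptr(p)
   can translate p to the corresponding device address. */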
15495 tree use_device_clauses;
15496 tree *pc, *uc = &use_device_clauses;
15497 for (pc = &OMP_CLAUSES (expr); *pc; )
15498 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15499 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15501 *uc = *pc;
15502 *pc = OMP_CLAUSE_CHAIN (*pc);
15503 uc = &OMP_CLAUSE_CHAIN (*uc);
15505 else
15506 pc = &OMP_CLAUSE_CHAIN (*pc);
15507 *uc = NULL_TREE;
15508 *pc = use_device_clauses;
15509 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15510 OMP_CLAUSES (expr));
15512 break;
15513 case OMP_TEAMS:
15514 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15515 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15516 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15517 break;
15518 default:
15519 gcc_unreachable ();
15522 gimplify_seq_add_stmt (pre_p, stmt);
15523 *expr_p = NULL_TREE;
15526 /* Gimplify the gross structure of OpenACC enter/exit data, update, and
15527 OpenMP target update, target enter data, and target exit data constructs. */
15529 static void
15530 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15532 tree expr = *expr_p;
15533 int kind;
15534 gomp_target *stmt;
15535 enum omp_region_type ort = ORT_WORKSHARE;
15537 switch (TREE_CODE (expr))
15539 case OACC_ENTER_DATA:
15540 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15541 ort = ORT_ACC;
15542 break;
15543 case OACC_EXIT_DATA:
15544 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15545 ort = ORT_ACC;
15546 break;
15547 case OACC_UPDATE:
15548 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15549 ort = ORT_ACC;
15550 break;
15551 case OMP_TARGET_UPDATE:
15552 kind = GF_OMP_TARGET_KIND_UPDATE;
15553 break;
15554 case OMP_TARGET_ENTER_DATA:
15555 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15556 break;
15557 case OMP_TARGET_EXIT_DATA:
15558 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15559 break;
15560 default:
15561 gcc_unreachable ();
15563 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15564 ort, TREE_CODE (expr));
15565 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15566 TREE_CODE (expr));
15567 if (TREE_CODE (expr) == OACC_UPDATE
15568 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15569 OMP_CLAUSE_IF_PRESENT))
15571 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15572 clause. */
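/* E.g. (a sketch) "#pragma acc update device(x) if_present" downgrades
   the GOMP_MAP_FORCE_TO mapping of x to GOMP_MAP_TO, which the runtime
   treats as "update only if already present". */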
15573 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15574 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15575 switch (OMP_CLAUSE_MAP_KIND (c))
15577 case GOMP_MAP_FORCE_TO:
15578 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15579 break;
15580 case GOMP_MAP_FORCE_FROM:
15581 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15582 break;
15583 default:
15584 break;
15587 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15588 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15589 OMP_CLAUSE_FINALIZE))
15591 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15592 semantics. */
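/* E.g. (a sketch) for
     #pragma acc exit data finalize copyout(x) delete(y)
   the x mapping becomes GOMP_MAP_FORCE_FROM and the y mapping
   GOMP_MAP_DELETE, forcing the reference counts to zero. */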
15593 bool have_clause = false;
15594 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15595 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15596 switch (OMP_CLAUSE_MAP_KIND (c))
15598 case GOMP_MAP_FROM:
15599 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15600 have_clause = true;
15601 break;
15602 case GOMP_MAP_RELEASE:
15603 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15604 have_clause = true;
15605 break;
15606 case GOMP_MAP_TO_PSET:
15607 /* Fortran arrays with descriptors must map that descriptor when
15608 doing standalone "attach" operations (in OpenACC). In that
15609 case GOMP_MAP_TO_PSET appears by itself with no preceding
15610 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15611 break;
15612 case GOMP_MAP_POINTER:
15613 /* TODO PR92929: we may see these here, but they'll always follow
15614 one of the clauses above, and will be handled by libgomp as
15615 one group, so no handling required here. */
15616 gcc_assert (have_clause);
15617 break;
15618 case GOMP_MAP_DETACH:
15619 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15620 have_clause = false;
15621 break;
15622 case GOMP_MAP_STRUCT:
15623 have_clause = false;
15624 break;
15625 default:
15626 gcc_unreachable ();
15629 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15631 gimplify_seq_add_stmt (pre_p, stmt);
15632 *expr_p = NULL_TREE;
15635 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15636 stabilized the lhs of the atomic operation as *ADDR. Return true if
15637 EXPR is this stabilized form. */
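/* E.g. (an illustrative case) with ADDR being &x, both "*&x" and "x"
   itself, possibly wrapped in useless type conversions, are accepted
   as the stabilized lhs. */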
15639 static bool
15640 goa_lhs_expr_p (tree expr, tree addr)
15642 /* Also include casts to other type variants. The C front end is fond
15643 of adding these for e.g. volatile variables. This is like
15644 STRIP_TYPE_NOPS but includes the main variant lookup. */
15645 STRIP_USELESS_TYPE_CONVERSION (expr);
15647 if (TREE_CODE (expr) == INDIRECT_REF)
15649 expr = TREE_OPERAND (expr, 0);
15650 while (expr != addr
15651 && (CONVERT_EXPR_P (expr)
15652 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15653 && TREE_CODE (expr) == TREE_CODE (addr)
15654 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15656 expr = TREE_OPERAND (expr, 0);
15657 addr = TREE_OPERAND (addr, 0);
15659 if (expr == addr)
15660 return true;
15661 return (TREE_CODE (addr) == ADDR_EXPR
15662 && TREE_CODE (expr) == ADDR_EXPR
15663 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15665 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15666 return true;
15667 return false;
15670 /* Walk *EXPR_P and replace occurrences of *LHS_ADDR with LHS_VAR. If an
15671 expression does not involve the lhs, evaluate it into a temporary.
15672 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15673 or -1 if an error was encountered. */
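/* For instance (a sketch), for "#pragma omp atomic" applied to
   x = x + foo (), the foo () call does not involve the lhs, so it is
   evaluated into a temporary in PRE_P, while the occurrence of x is
   replaced by LHS_VAR. */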
15675 static int
15676 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15677 tree lhs_var, tree &target_expr, bool rhs, int depth)
15679 tree expr = *expr_p;
15680 int saw_lhs = 0;
15682 if (goa_lhs_expr_p (expr, lhs_addr))
15684 if (pre_p)
15685 *expr_p = lhs_var;
15686 return 1;
15688 if (is_gimple_val (expr))
15689 return 0;
15691 /* The maximum depth at which the lhs appears in an expression is for
15692 the __builtin_clear_padding (...), __builtin_clear_padding (...),
15693 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; case. */
15694 if (++depth > 7)
15695 goto finish;
15697 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15699 case tcc_binary:
15700 case tcc_comparison:
15701 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15702 lhs_var, target_expr, true, depth);
15703 /* FALLTHRU */
15704 case tcc_unary:
15705 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15706 lhs_var, target_expr, true, depth);
15707 break;
15708 case tcc_expression:
15709 switch (TREE_CODE (expr))
15711 case TRUTH_ANDIF_EXPR:
15712 case TRUTH_ORIF_EXPR:
15713 case TRUTH_AND_EXPR:
15714 case TRUTH_OR_EXPR:
15715 case TRUTH_XOR_EXPR:
15716 case BIT_INSERT_EXPR:
15717 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15718 lhs_addr, lhs_var, target_expr, true,
15719 depth);
15720 /* FALLTHRU */
15721 case TRUTH_NOT_EXPR:
15722 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15723 lhs_addr, lhs_var, target_expr, true,
15724 depth);
15725 break;
15726 case MODIFY_EXPR:
15727 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15728 target_expr, true, depth))
15729 break;
15730 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15731 lhs_addr, lhs_var, target_expr, true,
15732 depth);
15733 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15734 lhs_addr, lhs_var, target_expr, false,
15735 depth);
15736 break;
15737 /* FALLTHRU */
15738 case ADDR_EXPR:
15739 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15740 target_expr, true, depth))
15741 break;
15742 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15743 lhs_addr, lhs_var, target_expr, false,
15744 depth);
15745 break;
15746 case COMPOUND_EXPR:
15747 /* Break out any preevaluations from cp_build_modify_expr. */
15748 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15749 expr = TREE_OPERAND (expr, 1))
15751 /* Special-case __builtin_clear_padding call before
15752 __builtin_memcmp. */
15753 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15755 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15756 if (fndecl
15757 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15758 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15759 && (!pre_p
15760 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15761 lhs_addr, lhs_var,
15762 target_expr, true, depth)))
15764 if (pre_p)
15765 *expr_p = expr;
15766 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15767 pre_p, lhs_addr, lhs_var,
15768 target_expr, true, depth);
15769 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15770 pre_p, lhs_addr, lhs_var,
15771 target_expr, rhs, depth);
15772 return saw_lhs;
15776 if (pre_p)
15777 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15779 if (!pre_p)
15780 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15781 target_expr, rhs, depth);
15782 *expr_p = expr;
15783 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15784 target_expr, rhs, depth);
15785 case COND_EXPR:
15786 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15787 lhs_var, target_expr, true, depth))
15788 break;
15789 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15790 lhs_addr, lhs_var, target_expr, true,
15791 depth);
15792 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15793 lhs_addr, lhs_var, target_expr, true,
15794 depth);
15795 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15796 lhs_addr, lhs_var, target_expr, true,
15797 depth);
15798 break;
15799 case TARGET_EXPR:
15800 if (TARGET_EXPR_INITIAL (expr))
15802 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15803 lhs_var, target_expr, true,
15804 depth))
15805 break;
15806 if (expr == target_expr)
15807 saw_lhs = 1;
15808 else
15810 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15811 pre_p, lhs_addr, lhs_var,
15812 target_expr, true, depth);
15813 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15814 target_expr = expr;
15817 break;
15818 default:
15819 break;
15821 break;
15822 case tcc_reference:
15823 if (TREE_CODE (expr) == BIT_FIELD_REF
15824 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15825 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15826 lhs_addr, lhs_var, target_expr, true,
15827 depth);
15828 break;
15829 case tcc_vl_exp:
15830 if (TREE_CODE (expr) == CALL_EXPR)
15832 if (tree fndecl = get_callee_fndecl (expr))
15833 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15834 || fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
15836 int nargs = call_expr_nargs (expr);
15837 for (int i = 0; i < nargs; i++)
15838 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15839 pre_p, lhs_addr, lhs_var,
15840 target_expr, true, depth);
15843 break;
15844 default:
15845 break;
15848 finish:
15849 if (saw_lhs == 0 && pre_p)
15851 enum gimplify_status gs;
15852 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
15854 gimplify_stmt (&expr, pre_p);
15855 return saw_lhs;
15857 else if (rhs)
15858 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
15859 else
15860 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
15861 if (gs != GS_ALL_DONE)
15862 saw_lhs = -1;
15865 return saw_lhs;
15868 /* Gimplify an OMP_ATOMIC statement. */
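/* Roughly (a sketch), "#pragma omp atomic" on x = x + 1 becomes

     t1 = GIMPLE_OMP_ATOMIC_LOAD (&x);
     t2 = t1 + 1;
     GIMPLE_OMP_ATOMIC_STORE (t2);

   with the capture variants making the old (t1) or new (t2) value
   the result of the expression. */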
15870 static enum gimplify_status
15871 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
15873 tree addr = TREE_OPERAND (*expr_p, 0);
15874 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
15875 ? NULL : TREE_OPERAND (*expr_p, 1);
15876 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
15877 tree tmp_load;
15878 gomp_atomic_load *loadstmt;
15879 gomp_atomic_store *storestmt;
15880 tree target_expr = NULL_TREE;
15882 tmp_load = create_tmp_reg (type);
15883 if (rhs
15884 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
15885 true, 0) < 0)
15886 return GS_ERROR;
15888 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
15889 != GS_ALL_DONE)
15890 return GS_ERROR;
15892 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
15893 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15894 gimplify_seq_add_stmt (pre_p, loadstmt);
15895 if (rhs)
15897 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15898 representatives. Use BIT_FIELD_REF on the lhs instead. */
15899 tree rhsarg = rhs;
15900 if (TREE_CODE (rhs) == COND_EXPR)
15901 rhsarg = TREE_OPERAND (rhs, 1);
15902 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
15903 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
15905 tree bitpos = TREE_OPERAND (rhsarg, 2);
15906 tree op1 = TREE_OPERAND (rhsarg, 1);
15907 tree bitsize;
15908 tree tmp_store = tmp_load;
15909 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
15910 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
15911 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
15912 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
15913 else
15914 bitsize = TYPE_SIZE (TREE_TYPE (op1));
15915 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
15916 tree t = build2_loc (EXPR_LOCATION (rhsarg),
15917 MODIFY_EXPR, void_type_node,
15918 build3_loc (EXPR_LOCATION (rhsarg),
15919 BIT_FIELD_REF, TREE_TYPE (op1),
15920 tmp_store, bitsize, bitpos), op1);
15921 if (TREE_CODE (rhs) == COND_EXPR)
15922 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
15923 TREE_OPERAND (rhs, 0), t, void_node);
15924 gimplify_and_add (t, pre_p);
15925 rhs = tmp_store;
15927 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
15928 if (TREE_CODE (rhs) == COND_EXPR)
15929 gimplify_ctxp->allow_rhs_cond_expr = true;
15930 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
15931 is_gimple_val, fb_rvalue);
15932 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
15933 if (gs != GS_ALL_DONE)
15934 return GS_ERROR;
15937 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
15938 rhs = tmp_load;
15939 storestmt
15940 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15941 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
15943 gimple_omp_atomic_set_weak (loadstmt);
15944 gimple_omp_atomic_set_weak (storestmt);
15946 gimplify_seq_add_stmt (pre_p, storestmt);
15947 switch (TREE_CODE (*expr_p))
15949 case OMP_ATOMIC_READ:
15950 case OMP_ATOMIC_CAPTURE_OLD:
15951 *expr_p = tmp_load;
15952 gimple_omp_atomic_set_need_value (loadstmt);
15953 break;
15954 case OMP_ATOMIC_CAPTURE_NEW:
15955 *expr_p = rhs;
15956 gimple_omp_atomic_set_need_value (storestmt);
15957 break;
15958 default:
15959 *expr_p = NULL;
15960 break;
15963 return GS_ALL_DONE;
15966 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
15967 body, and adding some EH bits. */
15969 static enum gimplify_status
15970 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
15972 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
15973 gimple *body_stmt;
15974 gtransaction *trans_stmt;
15975 gimple_seq body = NULL;
15976 int subcode = 0;
15978 /* Wrap the transaction body in a BIND_EXPR so we have a context
15979 in which to put decls for OMP. */
15980 if (TREE_CODE (tbody) != BIND_EXPR)
15982 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
15983 TREE_SIDE_EFFECTS (bind) = 1;
15984 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
15985 TRANSACTION_EXPR_BODY (expr) = bind;
15988 push_gimplify_context ();
15989 temp = voidify_wrapper_expr (*expr_p, NULL);
15991 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
15992 pop_gimplify_context (body_stmt);
15994 trans_stmt = gimple_build_transaction (body);
15995 if (TRANSACTION_EXPR_OUTER (expr))
15996 subcode = GTMA_IS_OUTER;
15997 else if (TRANSACTION_EXPR_RELAXED (expr))
15998 subcode = GTMA_IS_RELAXED;
15999 gimple_transaction_set_subcode (trans_stmt, subcode);
16001 gimplify_seq_add_stmt (pre_p, trans_stmt);
16003 if (temp)
16005 *expr_p = temp;
16006 return GS_OK;
16009 *expr_p = NULL_TREE;
16010 return GS_ALL_DONE;
16013 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
16014 is the OMP_BODY of the original EXPR (which has already been
16015 gimplified so it's not present in the EXPR).
16017 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
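/* E.g. (a sketch) inside a loop nest with "ordered(2)" and iterators
   i and j, a depend(sink: i-1, j) / doacross(sink: i-1, j) clause
   must list exactly those two iteration variables, in order; the
   checks below diagnose mismatches. */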
16019 static gimple *
16020 gimplify_omp_ordered (tree expr, gimple_seq body)
16022 tree c, decls;
16023 int failures = 0;
16024 unsigned int i;
16025 tree source_c = NULL_TREE;
16026 tree sink_c = NULL_TREE;
16028 if (gimplify_omp_ctxp)
16030 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16031 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16032 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16034 error_at (OMP_CLAUSE_LOCATION (c),
16035 "%<ordered%> construct with %qs clause must be "
16036 "closely nested inside a loop with %<ordered%> clause",
16037 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16038 failures++;
16040 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16041 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16043 bool fail = false;
16044 sink_c = c;
16045 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16046 continue; /* omp_cur_iteration - 1 */
16047 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16048 decls && TREE_CODE (decls) == TREE_LIST;
16049 decls = TREE_CHAIN (decls), ++i)
16050 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16051 continue;
16052 else if (TREE_VALUE (decls)
16053 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16055 error_at (OMP_CLAUSE_LOCATION (c),
16056 "variable %qE is not an iteration "
16057 "of outermost loop %d, expected %qE",
16058 TREE_VALUE (decls), i + 1,
16059 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16060 fail = true;
16061 failures++;
16063 else
16064 TREE_VALUE (decls)
16065 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16066 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16068 error_at (OMP_CLAUSE_LOCATION (c),
16069 "number of variables in %qs clause with "
16070 "%<sink%> modifier does not match number of "
16071 "iteration variables",
16072 OMP_CLAUSE_DOACROSS_DEPEND (c)
16073 ? "depend" : "doacross");
16074 failures++;
16077 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16078 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16080 if (source_c)
16082 error_at (OMP_CLAUSE_LOCATION (c),
16083 "more than one %qs clause with %<source%> "
16084 "modifier on an %<ordered%> construct",
16085 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16086 ? "depend" : "doacross");
16087 failures++;
16089 else
16090 source_c = c;
16093 if (source_c && sink_c)
16095 error_at (OMP_CLAUSE_LOCATION (source_c),
16096 "%qs clause with %<source%> modifier specified "
16097 "together with %qs clauses with %<sink%> modifier "
16098 "on the same construct",
16099 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16100 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16101 failures++;
16104 if (failures)
16105 return gimple_build_nop ();
16106 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16109 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16110 expression produces a value to be used as an operand inside a GIMPLE
16111 statement, the value will be stored back in *EXPR_P. This value will
16112 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16113 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16114 emitted in PRE_P and POST_P.
16116 Additionally, this process may overwrite parts of the input
16117 expression during gimplification. Ideally, it should be
16118 possible to do non-destructive gimplification.
16120 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16121 the expression needs to evaluate to a value to be used as
16122 an operand in a GIMPLE statement, this value will be stored in
16123 *EXPR_P on exit. This happens when the caller specifies one
16124 of fb_lvalue or fb_rvalue fallback flags.
16126 PRE_P will contain the sequence of GIMPLE statements corresponding
16127 to the evaluation of EXPR and all the side-effects that must
16128 be executed before the main expression. On exit, the last
16129 statement of PRE_P is the core statement being gimplified. For
16130 instance, when gimplifying 'if (++a)' the last statement in
16131 PRE_P will be 'if (t.1)' where t.1 is the result of
16132 pre-incrementing 'a'.
16134 POST_P will contain the sequence of GIMPLE statements corresponding
16135 to the evaluation of all the side-effects that must be executed
16136 after the main expression. If this is NULL, the post
16137 side-effects are stored at the end of PRE_P.
16139 The reason why the output is split in two is to handle post
16140 side-effects explicitly. In some cases, an expression may have
16141 inner and outer post side-effects which need to be emitted in
16142 an order different from the one given by the recursive
16143 traversal. For instance, for the expression (*p--)++ the post
16144 side-effects of '--' must actually occur *after* the post
16145 side-effects of '++'. However, gimplification will first visit
16146 the inner expression, so if a separate POST sequence was not
16147 used, the resulting sequence would be:
16149 1 t.1 = *p
16150 2 p = p - 1
16151 3 t.2 = t.1 + 1
16152 4 *p = t.2
16154 However, the post-decrement operation in line #2 must not be
16155 evaluated until after the store to *p at line #4, so the
16156 correct sequence should be:
16158 1 t.1 = *p
16159 2 t.2 = t.1 + 1
16160 3 *p = t.2
16161 4 p = p - 1
16163 So, by specifying a separate post queue, it is possible
16164 to emit the post side-effects in the correct order.
16165 If POST_P is NULL, an internal queue will be used. Before
16166 returning to the caller, the sequence POST_P is appended to
16167 the main output sequence PRE_P.
16169 GIMPLE_TEST_F points to a function that takes a tree T and
16170 returns nonzero if T is in the GIMPLE form requested by the
16171 caller. The GIMPLE predicates are in gimple.cc.
16173 FALLBACK tells the function what sort of a temporary we want if
16174 gimplification cannot produce an expression that complies with
16175 GIMPLE_TEST_F.
16177 fb_none means that no temporary should be generated
16178 fb_rvalue means that an rvalue is OK to generate
16179 fb_lvalue means that an lvalue is OK to generate
16180 fb_either means that either is OK, but an lvalue is preferable.
16181 fb_mayfail means that gimplification may fail (in which case
16182 GS_ERROR will be returned)
16184 The return value is either GS_ERROR or GS_ALL_DONE, since this
16185 function iterates until EXPR is completely gimplified or an error
16186 occurs. */
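/* A typical use (a sketch mirroring calls elsewhere in this file):
   force an operand into a GIMPLE value, with any needed temporaries
   emitted into PRE_P:

     if (gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue)
         == GS_ERROR)
       return GS_ERROR;  */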
16188 enum gimplify_status
16189 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16190 bool (*gimple_test_f) (tree), fallback_t fallback)
16192 tree tmp;
16193 gimple_seq internal_pre = NULL;
16194 gimple_seq internal_post = NULL;
16195 tree save_expr;
16196 bool is_statement;
16197 location_t saved_location;
16198 enum gimplify_status ret;
16199 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16200 tree label;
16202 save_expr = *expr_p;
16203 if (save_expr == NULL_TREE)
16204 return GS_ALL_DONE;
16206 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16207 is_statement = gimple_test_f == is_gimple_stmt;
16208 if (is_statement)
16209 gcc_assert (pre_p);
16211 /* Consistency checks. */
16212 if (gimple_test_f == is_gimple_reg)
16213 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16214 else if (gimple_test_f == is_gimple_val
16215 || gimple_test_f == is_gimple_call_addr
16216 || gimple_test_f == is_gimple_condexpr_for_cond
16217 || gimple_test_f == is_gimple_mem_rhs
16218 || gimple_test_f == is_gimple_mem_rhs_or_call
16219 || gimple_test_f == is_gimple_reg_rhs
16220 || gimple_test_f == is_gimple_reg_rhs_or_call
16221 || gimple_test_f == is_gimple_asm_val
16222 || gimple_test_f == is_gimple_mem_ref_addr)
16223 gcc_assert (fallback & fb_rvalue);
16224 else if (gimple_test_f == is_gimple_min_lval
16225 || gimple_test_f == is_gimple_lvalue)
16226 gcc_assert (fallback & fb_lvalue);
16227 else if (gimple_test_f == is_gimple_addressable)
16228 gcc_assert (fallback & fb_either);
16229 else if (gimple_test_f == is_gimple_stmt)
16230 gcc_assert (fallback == fb_none);
16231 else
16233 /* We should have recognized the GIMPLE_TEST_F predicate to
16234 know what kind of fallback to use in case a temporary is
16235 needed to hold the value or address of *EXPR_P. */
16236 gcc_unreachable ();
16239 /* We used to check the predicate here and return immediately if it
16240 succeeds. This is wrong; the design is for gimplification to be
16241 idempotent, and for the predicates to only test for valid forms, not
16242 whether they are fully simplified. */
16243 if (pre_p == NULL)
16244 pre_p = &internal_pre;
16246 if (post_p == NULL)
16247 post_p = &internal_post;
16249 /* Remember the last statements added to PRE_P and POST_P. Every
16250 new statement added by the gimplification helpers needs to be
16251 annotated with location information. To centralize the
16252 responsibility, we remember the last statement that had been
16253 added to both queues before gimplifying *EXPR_P. If
16254 gimplification produces new statements in PRE_P and POST_P, those
16255 statements will be annotated with the same location information
16256 as *EXPR_P. */
16257 pre_last_gsi = gsi_last (*pre_p);
16258 post_last_gsi = gsi_last (*post_p);
16260 saved_location = input_location;
16261 if (save_expr != error_mark_node
16262 && EXPR_HAS_LOCATION (*expr_p))
16263 input_location = EXPR_LOCATION (*expr_p);
16265 /* Loop over the specific gimplifiers until the toplevel node
16266 remains the same. */
16269 /* Strip away as many useless type conversions as possible
16270 at the toplevel. */
16271 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16273 /* Remember the expr. */
16274 save_expr = *expr_p;
16276 /* Die, die, die, my darling. */
16277 if (error_operand_p (save_expr))
16279 ret = GS_ERROR;
16280 break;
16283 /* Do any language-specific gimplification. */
16284 ret = ((enum gimplify_status)
16285 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16286 if (ret == GS_OK)
16288 if (*expr_p == NULL_TREE)
16289 break;
16290 if (*expr_p != save_expr)
16291 continue;
16293 else if (ret != GS_UNHANDLED)
16294 break;
16296 /* Make sure that all the cases set 'ret' appropriately. */
16297 ret = GS_UNHANDLED;
16298 switch (TREE_CODE (*expr_p))
16300 /* First deal with the special cases. */
16302 case POSTINCREMENT_EXPR:
16303 case POSTDECREMENT_EXPR:
16304 case PREINCREMENT_EXPR:
16305 case PREDECREMENT_EXPR:
16306 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16307 fallback != fb_none,
16308 TREE_TYPE (*expr_p));
16309 break;
16311 case VIEW_CONVERT_EXPR:
16312 if ((fallback & fb_rvalue)
16313 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16314 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16316 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16317 post_p, is_gimple_val, fb_rvalue);
16318 recalculate_side_effects (*expr_p);
16319 break;
16321 /* Fallthru. */
16323 case ARRAY_REF:
16324 case ARRAY_RANGE_REF:
16325 case REALPART_EXPR:
16326 case IMAGPART_EXPR:
16327 case COMPONENT_REF:
16328 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16329 fallback ? fallback : fb_rvalue);
16330 break;
16332 case COND_EXPR:
16333 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16335 /* C99 code may assign to an array in a structure value of a
16336 conditional expression, and this has undefined behavior
16337 only on execution, so create a temporary if an lvalue is
16338 required. */
16339 if (fallback == fb_lvalue)
16341 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16342 mark_addressable (*expr_p);
16343 ret = GS_OK;
16345 break;
16347 case CALL_EXPR:
16348 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16350 /* C99 code may assign to an array in a structure returned
16351 from a function, and this has undefined behavior only on
16352 execution, so create a temporary if an lvalue is
16353 required. */
16354 if (fallback == fb_lvalue)
16356 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16357 mark_addressable (*expr_p);
16358 ret = GS_OK;
16360 break;
16362 case TREE_LIST:
16363 gcc_unreachable ();
16365 case COMPOUND_EXPR:
16366 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16367 break;
16369 case COMPOUND_LITERAL_EXPR:
16370 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16371 gimple_test_f, fallback);
16372 break;
16374 case MODIFY_EXPR:
16375 case INIT_EXPR:
16376 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16377 fallback != fb_none);
16378 break;
16380 case TRUTH_ANDIF_EXPR:
16381 case TRUTH_ORIF_EXPR:
16383 /* Preserve the original type of the expression and the
16384 source location of the outer expression. */
16385 tree org_type = TREE_TYPE (*expr_p);
16386 *expr_p = gimple_boolify (*expr_p);
16387 *expr_p = build3_loc (input_location, COND_EXPR,
16388 org_type, *expr_p,
16389 fold_convert_loc
16390 (input_location,
16391 org_type, boolean_true_node),
16392 fold_convert_loc
16393 (input_location,
16394 org_type, boolean_false_node));
16395 ret = GS_OK;
16396 break;
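/* A sketch for illustration (not in the original source): for
   "x = a && b" with int operands, the TRUTH_ANDIF_EXPR is boolified
   and rebuilt as
     a && b ? (int) 1 : (int) 0
   and on the next iteration of the main loop the COND_EXPR case
   lowers it into explicit, short-circuiting branches.  */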
16399 case TRUTH_NOT_EXPR:
16401 tree type = TREE_TYPE (*expr_p);
16402 /* The parsers are careful to generate TRUTH_NOT_EXPR
16403 only with operands that are always zero or one.
16404 We do not fold here but handle the only interesting case
16405 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16406 *expr_p = gimple_boolify (*expr_p);
16407 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16408 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16409 TREE_TYPE (*expr_p),
16410 TREE_OPERAND (*expr_p, 0));
16411 else
16412 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16413 TREE_TYPE (*expr_p),
16414 TREE_OPERAND (*expr_p, 0),
16415 build_int_cst (TREE_TYPE (*expr_p), 1));
16416 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16417 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16418 ret = GS_OK;
16419 break;
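/* A sketch for illustration (not in the original source): after
   boolification, "!b" becomes "~b" when the boolean type has 1-bit
   precision, and "b ^ 1" for a wider boolean type, followed by a
   conversion back to the original type when that differs.  */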
16422 case ADDR_EXPR:
16423 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16424 break;
16426 case ANNOTATE_EXPR:
16428 tree cond = TREE_OPERAND (*expr_p, 0);
16429 tree kind = TREE_OPERAND (*expr_p, 1);
16430 tree data = TREE_OPERAND (*expr_p, 2);
16431 tree type = TREE_TYPE (cond);
16432 if (!INTEGRAL_TYPE_P (type))
16434 *expr_p = cond;
16435 ret = GS_OK;
16436 break;
16438 tree tmp = create_tmp_var (type);
16439 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16440 gcall *call
16441 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16442 gimple_call_set_lhs (call, tmp);
16443 gimplify_seq_add_stmt (pre_p, call);
16444 *expr_p = tmp;
16445 ret = GS_ALL_DONE;
16446 break;
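/* A sketch for illustration (not in the original source): a loop
   annotation such as "#pragma GCC ivdep" wraps the loop condition in
   an ANNOTATE_EXPR, which lowers here to roughly
     tmp = .ANNOTATE (cond, annot_expr_ivdep_kind, 0);
   so later passes can recover the annotation from the condition.  */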
16449 case VA_ARG_EXPR:
16450 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16451 break;
16453 CASE_CONVERT:
16454 if (IS_EMPTY_STMT (*expr_p))
16456 ret = GS_ALL_DONE;
16457 break;
16460 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16461 || fallback == fb_none)
16463 /* Just strip a conversion to void (or in void context) and
16464 try again. */
16465 *expr_p = TREE_OPERAND (*expr_p, 0);
16466 ret = GS_OK;
16467 break;
16470 ret = gimplify_conversion (expr_p);
16471 if (ret == GS_ERROR)
16472 break;
16473 if (*expr_p != save_expr)
16474 break;
16475 /* FALLTHRU */
16477 case FIX_TRUNC_EXPR:
16478 /* unary_expr: ... | '(' cast ')' val | ... */
16479 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16480 is_gimple_val, fb_rvalue);
16481 recalculate_side_effects (*expr_p);
16482 break;
16484 case INDIRECT_REF:
16486 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16487 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16488 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16490 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16491 if (*expr_p != save_expr)
16493 ret = GS_OK;
16494 break;
16497 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16498 is_gimple_reg, fb_rvalue);
16499 if (ret == GS_ERROR)
16500 break;
16502 recalculate_side_effects (*expr_p);
16503 *expr_p = fold_build2_loc (input_location, MEM_REF,
16504 TREE_TYPE (*expr_p),
16505 TREE_OPERAND (*expr_p, 0),
16506 build_int_cst (saved_ptr_type, 0));
16507 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16508 TREE_THIS_NOTRAP (*expr_p) = notrap;
16509 ret = GS_OK;
16510 break;
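/* A sketch for illustration (not in the original source): a plain
   dereference "*p" that survives fold_indirect_ref_loc is rewritten
   as the equivalent MEM_REF with a zero offset, with the volatility
   and no-trap flags of the original INDIRECT_REF carried over.  */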
16513 /* We arrive here through the various re-gimplification paths. */
16514 case MEM_REF:
16515 /* First try re-folding the whole thing. */
16516 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16517 TREE_OPERAND (*expr_p, 0),
16518 TREE_OPERAND (*expr_p, 1));
16519 if (tmp)
16521 REF_REVERSE_STORAGE_ORDER (tmp)
16522 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16523 *expr_p = tmp;
16524 recalculate_side_effects (*expr_p);
16525 ret = GS_OK;
16526 break;
16528 /* Avoid re-gimplifying the address operand if it is already
16529 in suitable form. Re-gimplifying would mark the address
16530 operand addressable. Always gimplify when not in SSA form
16531 as we still may have to gimplify decls with value-exprs. */
16532 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16533 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16535 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16536 is_gimple_mem_ref_addr, fb_rvalue);
16537 if (ret == GS_ERROR)
16538 break;
16540 recalculate_side_effects (*expr_p);
16541 ret = GS_ALL_DONE;
16542 break;
16544 /* Constants need not be gimplified. */
16545 case INTEGER_CST:
16546 case REAL_CST:
16547 case FIXED_CST:
16548 case STRING_CST:
16549 case COMPLEX_CST:
16550 case VECTOR_CST:
16551 /* Drop the overflow flag on constants, we do not want
16552 that in the GIMPLE IL. */
16553 if (TREE_OVERFLOW_P (*expr_p))
16554 *expr_p = drop_tree_overflow (*expr_p);
16555 ret = GS_ALL_DONE;
16556 break;
16558 case CONST_DECL:
16559 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16560 CONST_DECL node. Otherwise the decl is replaceable by its
16561 value. */
16562 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16563 if (fallback & fb_lvalue)
16564 ret = GS_ALL_DONE;
16565 else
16567 *expr_p = DECL_INITIAL (*expr_p);
16568 ret = GS_OK;
16570 break;
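/* A sketch for illustration (not in the original source): a named
   constant represented as a CONST_DECL (e.g. a C++ enumerator) is
   simply replaced by its DECL_INITIAL value here, unless its address
   is needed, in which case the decl itself must survive.  */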
16572 case DECL_EXPR:
16573 ret = gimplify_decl_expr (expr_p, pre_p);
16574 break;
16576 case BIND_EXPR:
16577 ret = gimplify_bind_expr (expr_p, pre_p);
16578 break;
16580 case LOOP_EXPR:
16581 ret = gimplify_loop_expr (expr_p, pre_p);
16582 break;
16584 case SWITCH_EXPR:
16585 ret = gimplify_switch_expr (expr_p, pre_p);
16586 break;
16588 case EXIT_EXPR:
16589 ret = gimplify_exit_expr (expr_p);
16590 break;
16592 case GOTO_EXPR:
16593 /* If the target is not a LABEL_DECL, then it is a computed jump
16594 and the target needs to be gimplified. */
16595 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16597 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16598 NULL, is_gimple_val, fb_rvalue);
16599 if (ret == GS_ERROR)
16600 break;
16602 gimplify_seq_add_stmt (pre_p,
16603 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16604 ret = GS_ALL_DONE;
16605 break;
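/* A sketch for illustration (not in the original source): for the GNU
   computed goto "goto *fn_table[i];" the destination is not a
   LABEL_DECL, so it is gimplified to a register-like value first:
     tmp = fn_table[i];  goto tmp;  */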
16607 case PREDICT_EXPR:
16608 gimplify_seq_add_stmt (pre_p,
16609 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16610 PREDICT_EXPR_OUTCOME (*expr_p)));
16611 ret = GS_ALL_DONE;
16612 break;
16614 case LABEL_EXPR:
16615 ret = gimplify_label_expr (expr_p, pre_p);
16616 label = LABEL_EXPR_LABEL (*expr_p);
16617 gcc_assert (decl_function_context (label) == current_function_decl);
16619 /* If the label is used in a goto statement, or the address of the
16620 label is taken, we need to unpoison all variables that were seen
16621 so far. Doing so prevents us from reporting false positives. */
16622 if (asan_poisoned_variables
16623 && asan_used_labels != NULL
16624 && asan_used_labels->contains (label)
16625 && !gimplify_omp_ctxp)
16626 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16627 break;
16629 case CASE_LABEL_EXPR:
16630 ret = gimplify_case_label_expr (expr_p, pre_p);
16632 if (gimplify_ctxp->live_switch_vars)
16633 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16634 pre_p);
16635 break;
16637 case RETURN_EXPR:
16638 ret = gimplify_return_expr (*expr_p, pre_p);
16639 break;
16641 case CONSTRUCTOR:
16642 /* Don't reduce this in place; let gimplify_init_constructor work its
16643 magic. But if we're just elaborating this for side effects, just
16644 gimplify any element that has side-effects. */
16645 if (fallback == fb_none)
16647 unsigned HOST_WIDE_INT ix;
16648 tree val;
16649 tree temp = NULL_TREE;
16650 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16651 if (TREE_SIDE_EFFECTS (val))
16652 append_to_statement_list (val, &temp);
16654 *expr_p = temp;
16655 ret = temp ? GS_OK : GS_ALL_DONE;
16657 /* C99 code may assign to an array in a constructed
16658 structure or union, and this has undefined behavior only
16659 on execution, so create a temporary if an lvalue is
16660 required. */
16661 else if (fallback == fb_lvalue)
16663 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16664 mark_addressable (*expr_p);
16665 ret = GS_OK;
16667 else
16668 ret = GS_ALL_DONE;
16669 break;
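/* A sketch for illustration (not in the original source): a
   constructor such as { f (), 0 } evaluated purely for side effects
   reduces to the bare call "f ();" -- the constructor itself is
   dropped, since only its side-effecting elements matter here.  */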
16671 /* The following are special cases that are not handled by the
16672 original GIMPLE grammar. */
16674 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16675 eliminated. */
16676 case SAVE_EXPR:
16677 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16678 break;
16680 case BIT_FIELD_REF:
16681 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16682 post_p, is_gimple_lvalue, fb_either);
16683 recalculate_side_effects (*expr_p);
16684 break;
16686 case TARGET_MEM_REF:
16688 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16690 if (TMR_BASE (*expr_p))
16691 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16692 post_p, is_gimple_mem_ref_addr, fb_either);
16693 if (TMR_INDEX (*expr_p))
16694 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16695 post_p, is_gimple_val, fb_rvalue);
16696 if (TMR_INDEX2 (*expr_p))
16697 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16698 post_p, is_gimple_val, fb_rvalue);
16699 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16700 ret = MIN (r0, r1);
16702 break;
16704 case NON_LVALUE_EXPR:
16705 /* This should have been stripped above. */
16706 gcc_unreachable ();
16708 case ASM_EXPR:
16709 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16710 break;
16712 case TRY_FINALLY_EXPR:
16713 case TRY_CATCH_EXPR:
16715 gimple_seq eval, cleanup;
16716 gtry *try_;
16718 /* Calls to destructors are generated automatically in FINALLY/CATCH
16719 blocks and should carry UNKNOWN_LOCATION. However,
16720 gimplify_call_expr will reset these call stmts to input_location
16721 if it finds a stmt's location is unknown. To prevent this resetting
16722 for destructors, we set input_location to UNKNOWN_LOCATION.
16723 Note that this only affects the destructor calls in FINALLY/CATCH
16724 blocks, and input_location is automatically restored to its original
16725 value by the end of gimplify_expr. */
16726 input_location = UNKNOWN_LOCATION;
16727 eval = cleanup = NULL;
16728 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16729 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16730 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16732 gimple_seq n = NULL, e = NULL;
16733 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16734 0), &n);
16735 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16736 1), &e);
16737 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16739 geh_else *stmt = gimple_build_eh_else (n, e);
16740 gimple_seq_add_stmt (&cleanup, stmt);
16743 else
16744 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16745 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16746 if (gimple_seq_empty_p (cleanup))
16748 gimple_seq_add_seq (pre_p, eval);
16749 ret = GS_ALL_DONE;
16750 break;
16752 try_ = gimple_build_try (eval, cleanup,
16753 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16754 ? GIMPLE_TRY_FINALLY
16755 : GIMPLE_TRY_CATCH);
16756 if (EXPR_HAS_LOCATION (save_expr))
16757 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16758 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16759 gimple_set_location (try_, saved_location);
16760 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16761 gimple_try_set_catch_is_cleanup (try_,
16762 TRY_CATCH_IS_CLEANUP (*expr_p));
16763 gimplify_seq_add_stmt (pre_p, try_);
16764 ret = GS_ALL_DONE;
16765 break;
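/* A sketch for illustration (not in the original source): a cleanup
   region such as "try { body } finally { dtors }" becomes a
   GIMPLE_TRY_FINALLY tuple; when the cleanup is an EH_ELSE_EXPR, it
   splits into a GIMPLE_EH_ELSE with separate sequences for the
   normal path and the exception path.  */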
16768 case CLEANUP_POINT_EXPR:
16769 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16770 break;
16772 case TARGET_EXPR:
16773 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16774 break;
16776 case CATCH_EXPR:
16778 gimple *c;
16779 gimple_seq handler = NULL;
16780 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16781 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16782 gimplify_seq_add_stmt (pre_p, c);
16783 ret = GS_ALL_DONE;
16784 break;
16787 case EH_FILTER_EXPR:
16789 gimple *ehf;
16790 gimple_seq failure = NULL;
16792 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16793 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16794 copy_warning (ehf, *expr_p);
16795 gimplify_seq_add_stmt (pre_p, ehf);
16796 ret = GS_ALL_DONE;
16797 break;
16800 case OBJ_TYPE_REF:
16802 enum gimplify_status r0, r1;
16803 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16804 post_p, is_gimple_val, fb_rvalue);
16805 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16806 post_p, is_gimple_val, fb_rvalue);
16807 TREE_SIDE_EFFECTS (*expr_p) = 0;
16808 ret = MIN (r0, r1);
16810 break;
16812 case LABEL_DECL:
16813 /* We get here when taking the address of a label. We mark
16814 the label as "forced", meaning it can never be removed and
16815 it is a potential target for any computed goto. */
16816 FORCED_LABEL (*expr_p) = 1;
16817 ret = GS_ALL_DONE;
16818 break;
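/* A sketch for illustration (not in the original source): with the
   GNU extension "void *p = &&L;" the LABEL_DECL for L lands here;
   marking it FORCED_LABEL keeps it from being deleted and makes it a
   legitimate target for any subsequent "goto *p;".  */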
16820 case STATEMENT_LIST:
16821 ret = gimplify_statement_list (expr_p, pre_p);
16822 break;
16824 case WITH_SIZE_EXPR:
16826 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16827 post_p == &internal_post ? NULL : post_p,
16828 gimple_test_f, fallback);
16829 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16830 is_gimple_val, fb_rvalue);
16831 ret = GS_ALL_DONE;
16833 break;
16835 case VAR_DECL:
16836 case PARM_DECL:
16837 ret = gimplify_var_or_parm_decl (expr_p);
16838 break;
16840 case RESULT_DECL:
16841 /* When within an OMP context, notice uses of variables. */
16842 if (gimplify_omp_ctxp)
16843 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16844 ret = GS_ALL_DONE;
16845 break;
16847 case DEBUG_EXPR_DECL:
16848 gcc_unreachable ();
16850 case DEBUG_BEGIN_STMT:
16851 gimplify_seq_add_stmt (pre_p,
16852 gimple_build_debug_begin_stmt
16853 (TREE_BLOCK (*expr_p),
16854 EXPR_LOCATION (*expr_p)));
16855 ret = GS_ALL_DONE;
16856 *expr_p = NULL;
16857 break;
16859 case SSA_NAME:
16860 /* Allow callbacks into the gimplifier during optimization. */
16861 ret = GS_ALL_DONE;
16862 break;
16864 case OMP_PARALLEL:
16865 gimplify_omp_parallel (expr_p, pre_p);
16866 ret = GS_ALL_DONE;
16867 break;
16869 case OMP_TASK:
16870 gimplify_omp_task (expr_p, pre_p);
16871 ret = GS_ALL_DONE;
16872 break;
16874 case OMP_SIMD:
16876 /* Temporarily disable into_ssa, as scan_omp_simd,
16877 which calls copy_gimple_seq_and_replace_locals, can't properly
16878 deal with SSA_NAMEs defined outside of the body. */
16879 bool saved_into_ssa = gimplify_ctxp->into_ssa;
16880 gimplify_ctxp->into_ssa = false;
16881 ret = gimplify_omp_for (expr_p, pre_p);
16882 gimplify_ctxp->into_ssa = saved_into_ssa;
16883 break;
16886 case OMP_FOR:
16887 case OMP_DISTRIBUTE:
16888 case OMP_TASKLOOP:
16889 case OACC_LOOP:
16890 ret = gimplify_omp_for (expr_p, pre_p);
16891 break;
16893 case OMP_LOOP:
16894 ret = gimplify_omp_loop (expr_p, pre_p);
16895 break;
16897 case OACC_CACHE:
16898 gimplify_oacc_cache (expr_p, pre_p);
16899 ret = GS_ALL_DONE;
16900 break;
16902 case OACC_DECLARE:
16903 gimplify_oacc_declare (expr_p, pre_p);
16904 ret = GS_ALL_DONE;
16905 break;
16907 case OACC_HOST_DATA:
16908 case OACC_DATA:
16909 case OACC_KERNELS:
16910 case OACC_PARALLEL:
16911 case OACC_SERIAL:
16912 case OMP_SCOPE:
16913 case OMP_SECTIONS:
16914 case OMP_SINGLE:
16915 case OMP_TARGET:
16916 case OMP_TARGET_DATA:
16917 case OMP_TEAMS:
16918 gimplify_omp_workshare (expr_p, pre_p);
16919 ret = GS_ALL_DONE;
16920 break;
16922 case OACC_ENTER_DATA:
16923 case OACC_EXIT_DATA:
16924 case OACC_UPDATE:
16925 case OMP_TARGET_UPDATE:
16926 case OMP_TARGET_ENTER_DATA:
16927 case OMP_TARGET_EXIT_DATA:
16928 gimplify_omp_target_update (expr_p, pre_p);
16929 ret = GS_ALL_DONE;
16930 break;
16932 case OMP_SECTION:
16933 case OMP_MASTER:
16934 case OMP_MASKED:
16935 case OMP_ORDERED:
16936 case OMP_CRITICAL:
16937 case OMP_SCAN:
16939 gimple_seq body = NULL;
16940 gimple *g;
16941 bool saved_in_omp_construct = in_omp_construct;
16943 in_omp_construct = true;
16944 gimplify_and_add (OMP_BODY (*expr_p), &body);
16945 in_omp_construct = saved_in_omp_construct;
16946 switch (TREE_CODE (*expr_p))
16948 case OMP_SECTION:
16949 g = gimple_build_omp_section (body);
16950 break;
16951 case OMP_MASTER:
16952 g = gimple_build_omp_master (body);
16953 break;
16954 case OMP_ORDERED:
16955 g = gimplify_omp_ordered (*expr_p, body);
16956 if (OMP_BODY (*expr_p) == NULL_TREE
16957 && gimple_code (g) == GIMPLE_OMP_ORDERED)
16958 gimple_omp_ordered_standalone (g);
16959 break;
16960 case OMP_MASKED:
16961 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
16962 pre_p, ORT_WORKSHARE, OMP_MASKED);
16963 gimplify_adjust_omp_clauses (pre_p, body,
16964 &OMP_MASKED_CLAUSES (*expr_p),
16965 OMP_MASKED);
16966 g = gimple_build_omp_masked (body,
16967 OMP_MASKED_CLAUSES (*expr_p));
16968 break;
16969 case OMP_CRITICAL:
16970 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
16971 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
16972 gimplify_adjust_omp_clauses (pre_p, body,
16973 &OMP_CRITICAL_CLAUSES (*expr_p),
16974 OMP_CRITICAL);
16975 g = gimple_build_omp_critical (body,
16976 OMP_CRITICAL_NAME (*expr_p),
16977 OMP_CRITICAL_CLAUSES (*expr_p));
16978 break;
16979 case OMP_SCAN:
16980 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
16981 pre_p, ORT_WORKSHARE, OMP_SCAN);
16982 gimplify_adjust_omp_clauses (pre_p, body,
16983 &OMP_SCAN_CLAUSES (*expr_p),
16984 OMP_SCAN);
16985 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
16986 break;
16987 default:
16988 gcc_unreachable ();
16990 gimplify_seq_add_stmt (pre_p, g);
16991 ret = GS_ALL_DONE;
16992 break;
16995 case OMP_TASKGROUP:
16997 gimple_seq body = NULL;
16999 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
17000 bool saved_in_omp_construct = in_omp_construct;
17001 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
17002 OMP_TASKGROUP);
17003 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
17005 in_omp_construct = true;
17006 gimplify_and_add (OMP_BODY (*expr_p), &body);
17007 in_omp_construct = saved_in_omp_construct;
17008 gimple_seq cleanup = NULL;
17009 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
17010 gimple *g = gimple_build_call (fn, 0);
17011 gimple_seq_add_stmt (&cleanup, g);
17012 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
17013 body = NULL;
17014 gimple_seq_add_stmt (&body, g);
17015 g = gimple_build_omp_taskgroup (body, *pclauses);
17016 gimplify_seq_add_stmt (pre_p, g);
17017 ret = GS_ALL_DONE;
17018 break;
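/* A sketch for illustration (not in the original source):
   "#pragma omp taskgroup" thus gimplifies to roughly
     GIMPLE_OMP_TASKGROUP
       try { <body> } finally { GOMP_taskgroup_end (); }
   so the end hook runs even on an abnormal exit from the body.  */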
17021 case OMP_ATOMIC:
17022 case OMP_ATOMIC_READ:
17023 case OMP_ATOMIC_CAPTURE_OLD:
17024 case OMP_ATOMIC_CAPTURE_NEW:
17025 ret = gimplify_omp_atomic (expr_p, pre_p);
17026 break;
17028 case TRANSACTION_EXPR:
17029 ret = gimplify_transaction (expr_p, pre_p);
17030 break;
17032 case TRUTH_AND_EXPR:
17033 case TRUTH_OR_EXPR:
17034 case TRUTH_XOR_EXPR:
17036 tree orig_type = TREE_TYPE (*expr_p);
17037 tree new_type, xop0, xop1;
17038 *expr_p = gimple_boolify (*expr_p);
17039 new_type = TREE_TYPE (*expr_p);
17040 if (!useless_type_conversion_p (orig_type, new_type))
17042 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17043 ret = GS_OK;
17044 break;
17047 /* Boolified binary truth expressions are semantically equivalent
17048 to bitwise binary expressions. Canonicalize them to the
17049 bitwise variant. */
17050 switch (TREE_CODE (*expr_p))
17052 case TRUTH_AND_EXPR:
17053 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17054 break;
17055 case TRUTH_OR_EXPR:
17056 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17057 break;
17058 case TRUTH_XOR_EXPR:
17059 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17060 break;
17061 default:
17062 break;
17064 /* Now make sure that the operands have types compatible with
17065 the expression's new_type. */
17066 xop0 = TREE_OPERAND (*expr_p, 0);
17067 xop1 = TREE_OPERAND (*expr_p, 1);
17068 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17069 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17070 new_type,
17071 xop0);
17072 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17073 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17074 new_type,
17075 xop1);
17076 /* Continue classified as tcc_binary. */
17077 goto expr_2;
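/* A sketch for illustration (not in the original source): a
   non-short-circuit logical "and" arriving as TRUTH_AND_EXPR is
   re-coded in place as BIT_AND_EXPR once both operands are boolified,
   so the rest of the middle end only sees the bitwise form.  */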
17080 case VEC_COND_EXPR:
17081 goto expr_3;
17083 case VEC_PERM_EXPR:
17084 /* Classified as tcc_expression. */
17085 goto expr_3;
17087 case BIT_INSERT_EXPR:
17088 /* Argument 3 is a constant. */
17089 goto expr_2;
17091 case POINTER_PLUS_EXPR:
17093 enum gimplify_status r0, r1;
17094 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17095 post_p, is_gimple_val, fb_rvalue);
17096 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17097 post_p, is_gimple_val, fb_rvalue);
17098 recalculate_side_effects (*expr_p);
17099 ret = MIN (r0, r1);
17100 break;
17103 default:
17104 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17106 case tcc_comparison:
17107 /* Handle comparisons of non-scalar-mode aggregates with a call
17108 to memcmp. It would be nice to only have to do
17109 this for variable-sized objects, but then we'd have to allow
17110 the same nest of reference nodes we allow for MODIFY_EXPR and
17111 that's too complex.
17113 Compare scalar mode aggregates as scalar mode values. Using
17114 memcmp for them would be very inefficient at best, and is
17115 plain wrong if bitfields are involved. */
17116 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
17117 ret = GS_ERROR;
17118 else
17120 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17122 /* Vector comparisons need no boolification. */
17123 if (TREE_CODE (type) == VECTOR_TYPE)
17124 goto expr_2;
17125 else if (!AGGREGATE_TYPE_P (type))
17127 tree org_type = TREE_TYPE (*expr_p);
17128 *expr_p = gimple_boolify (*expr_p);
17129 if (!useless_type_conversion_p (org_type,
17130 TREE_TYPE (*expr_p)))
17132 *expr_p = fold_convert_loc (input_location,
17133 org_type, *expr_p);
17134 ret = GS_OK;
17136 else
17137 goto expr_2;
17139 else if (TYPE_MODE (type) != BLKmode)
17140 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17141 else
17142 ret = gimplify_variable_sized_compare (expr_p);
17144 break;
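/* A sketch for illustration (not in the original source): "s1 == s2"
   on two structs therefore takes one of three routes -- boolification
   for scalars, a compare as scalar-mode values for small aggregates
   whose mode is not BLKmode, or a memcmp-based compare for BLKmode
   and variable-sized aggregates.  */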
17146 /* If *EXPR_P does not need to be special-cased, handle it
17147 according to its class. */
17148 case tcc_unary:
17149 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17150 post_p, is_gimple_val, fb_rvalue);
17151 break;
17153 case tcc_binary:
17154 expr_2:
17156 enum gimplify_status r0, r1;
17158 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17159 post_p, is_gimple_val, fb_rvalue);
17160 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17161 post_p, is_gimple_val, fb_rvalue);
17163 ret = MIN (r0, r1);
17164 break;
17167 expr_3:
17169 enum gimplify_status r0, r1, r2;
17171 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17172 post_p, is_gimple_val, fb_rvalue);
17173 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17174 post_p, is_gimple_val, fb_rvalue);
17175 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17176 post_p, is_gimple_val, fb_rvalue);
17178 ret = MIN (MIN (r0, r1), r2);
17179 break;
17182 case tcc_declaration:
17183 case tcc_constant:
17184 ret = GS_ALL_DONE;
17185 goto dont_recalculate;
17187 default:
17188 gcc_unreachable ();
17191 recalculate_side_effects (*expr_p);
17193 dont_recalculate:
17194 break;
17197 gcc_assert (*expr_p || ret != GS_OK);
17199 while (ret == GS_OK);
17201 /* If we encountered an error_mark somewhere nested inside, either
17202 stub out the statement or propagate the error back out. */
17203 if (ret == GS_ERROR)
17205 if (is_statement)
17206 *expr_p = NULL;
17207 goto out;
17210 /* This was only valid as a return value from the langhook, which
17211 we handled. Make sure it doesn't escape from any other context. */
17212 gcc_assert (ret != GS_UNHANDLED);
17214 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17216 /* We aren't looking for a value, and we don't have a valid
17217 statement. If it doesn't have side-effects, throw it away.
17218 We can also get here with code such as "*&&L;", where L is
17219 a LABEL_DECL that is marked as FORCED_LABEL. */
17220 if (TREE_CODE (*expr_p) == LABEL_DECL
17221 || !TREE_SIDE_EFFECTS (*expr_p))
17222 *expr_p = NULL;
17223 else if (!TREE_THIS_VOLATILE (*expr_p))
17225 /* This is probably a _REF that contains something nested that
17226 has side effects. Recurse through the operands to find it. */
17227 enum tree_code code = TREE_CODE (*expr_p);
17229 switch (code)
17231 case COMPONENT_REF:
17232 case REALPART_EXPR:
17233 case IMAGPART_EXPR:
17234 case VIEW_CONVERT_EXPR:
17235 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17236 gimple_test_f, fallback);
17237 break;
17239 case ARRAY_REF:
17240 case ARRAY_RANGE_REF:
17241 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17242 gimple_test_f, fallback);
17243 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17244 gimple_test_f, fallback);
17245 break;
17247 default:
17248 /* Anything else with side-effects must be converted to
17249 a valid statement before we get here. */
17250 gcc_unreachable ();
17253 *expr_p = NULL;
17255 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17256 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17257 && !is_empty_type (TREE_TYPE (*expr_p)))
17259 /* Historically, the compiler has treated a bare reference
17260 to a non-BLKmode volatile lvalue as forcing a load. */
17261 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17263 /* Normally, we do not want to create a temporary for a
17264 TREE_ADDRESSABLE type because such a type should not be
17265 copied by bitwise-assignment. However, we make an
17266 exception here, as all we are doing here is ensuring that
17267 we read the bytes that make up the type. We use
17268 create_tmp_var_raw because create_tmp_var will abort when
17269 given a TREE_ADDRESSABLE type. */
17270 tree tmp = create_tmp_var_raw (type, "vol");
17271 gimple_add_tmp_var (tmp);
17272 gimplify_assign (tmp, *expr_p, pre_p);
17273 *expr_p = NULL;
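/* A sketch for illustration (not in the original source): given
   "volatile int *vp;", the bare statement "*vp;" is not discarded;
   it becomes
     vol.N = *vp;
   so the volatile load is still performed exactly once.  */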
17275 else
17276 /* We can't do anything useful with a volatile reference to
17277 an incomplete type, so just throw it away. Likewise for
17278 a BLKmode type, since any implicit inner load should
17279 already have been turned into an explicit one by the
17280 gimplification process. */
17281 *expr_p = NULL;
17284 /* If we are gimplifying at the statement level, we're done. Tack
17285 everything together and return. */
17286 if (fallback == fb_none || is_statement)
17288 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17289 it out for GC to reclaim it. */
17290 *expr_p = NULL_TREE;
17292 if (!gimple_seq_empty_p (internal_pre)
17293 || !gimple_seq_empty_p (internal_post))
17295 gimplify_seq_add_seq (&internal_pre, internal_post);
17296 gimplify_seq_add_seq (pre_p, internal_pre);
17299 /* The result of gimplifying *EXPR_P is going to be the last few
17300 statements in *PRE_P and *POST_P. Add location information
17301 to all the statements that were added by the gimplification
17302 helpers. */
17303 if (!gimple_seq_empty_p (*pre_p))
17304 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17306 if (!gimple_seq_empty_p (*post_p))
17307 annotate_all_with_location_after (*post_p, post_last_gsi,
17308 input_location);
17310 goto out;
17313 #ifdef ENABLE_GIMPLE_CHECKING
17314 if (*expr_p)
17316 enum tree_code code = TREE_CODE (*expr_p);
17317 /* These expressions should already be in gimple IR form. */
17318 gcc_assert (code != MODIFY_EXPR
17319 && code != ASM_EXPR
17320 && code != BIND_EXPR
17321 && code != CATCH_EXPR
17322 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17323 && code != EH_FILTER_EXPR
17324 && code != GOTO_EXPR
17325 && code != LABEL_EXPR
17326 && code != LOOP_EXPR
17327 && code != SWITCH_EXPR
17328 && code != TRY_FINALLY_EXPR
17329 && code != EH_ELSE_EXPR
17330 && code != OACC_PARALLEL
17331 && code != OACC_KERNELS
17332 && code != OACC_SERIAL
17333 && code != OACC_DATA
17334 && code != OACC_HOST_DATA
17335 && code != OACC_DECLARE
17336 && code != OACC_UPDATE
17337 && code != OACC_ENTER_DATA
17338 && code != OACC_EXIT_DATA
17339 && code != OACC_CACHE
17340 && code != OMP_CRITICAL
17341 && code != OMP_FOR
17342 && code != OACC_LOOP
17343 && code != OMP_MASTER
17344 && code != OMP_MASKED
17345 && code != OMP_TASKGROUP
17346 && code != OMP_ORDERED
17347 && code != OMP_PARALLEL
17348 && code != OMP_SCAN
17349 && code != OMP_SECTIONS
17350 && code != OMP_SECTION
17351 && code != OMP_SINGLE
17352 && code != OMP_SCOPE);
17354 #endif
17356 /* Otherwise we're gimplifying a subexpression, so the resulting
17357 value is interesting. If it's a valid operand that matches
17358 GIMPLE_TEST_F, we're done. Unless we are handling some
17359 post-effects internally; if that's the case, we need to copy into
17360 a temporary before adding the post-effects to POST_P. */
17361 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17362 goto out;
17364 /* Otherwise, we need to create a new temporary for the gimplified
17365 expression. */
17367 /* We can't return an lvalue if we have an internal postqueue. The
17368 object the lvalue refers to would (probably) be modified by the
17369 postqueue; we need to copy the value out first, which means an
17370 rvalue. */
17371 if ((fallback & fb_lvalue)
17372 && gimple_seq_empty_p (internal_post)
17373 && is_gimple_addressable (*expr_p))
17375 /* An lvalue will do. Take the address of the expression, store it
17376 in a temporary, and replace the expression with an INDIRECT_REF of
17377 that temporary. */
17378 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17379 unsigned int ref_align = get_object_alignment (*expr_p);
17380 tree ref_type = TREE_TYPE (*expr_p);
17381 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17382 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17383 if (TYPE_ALIGN (ref_type) != ref_align)
17384 ref_type = build_aligned_type (ref_type, ref_align);
17385 *expr_p = build2 (MEM_REF, ref_type,
17386 tmp, build_zero_cst (ref_alias_type));
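/* A sketch for illustration (not in the original source): the lvalue
   case above yields
     tmp = &<expr>;  ...  MEM[(T *) tmp, 0]
   so the caller receives an addressable reference instead of the raw
   expression, with the original alignment preserved in the type.  */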
17388 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17390 /* An rvalue will do. Assign the gimplified expression into a
17391 new temporary TMP and replace the original expression with
17392 TMP. First, make sure that the expression has a type so that
17393 it can be assigned into a temporary. */
17394 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17395 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17397 else
17399 #ifdef ENABLE_GIMPLE_CHECKING
17400 if (!(fallback & fb_mayfail))
17402 fprintf (stderr, "gimplification failed:\n");
17403 print_generic_expr (stderr, *expr_p);
17404 debug_tree (*expr_p);
17405 internal_error ("gimplification failed");
17407 #endif
17408 gcc_assert (fallback & fb_mayfail);
17410 /* If this is an asm statement, and the user asked for the
17411 impossible, don't die. Fail and let gimplify_asm_expr
17412 issue an error. */
17413 ret = GS_ERROR;
17414 goto out;
17417 /* Make sure the temporary matches our predicate. */
17418 gcc_assert ((*gimple_test_f) (*expr_p));
17420 if (!gimple_seq_empty_p (internal_post))
17422 annotate_all_with_location (internal_post, input_location);
17423 gimplify_seq_add_seq (pre_p, internal_post);
17426 out:
17427 input_location = saved_location;
17428 return ret;
17431 /* Like gimplify_expr but make sure the gimplified result is not itself
17432 an SSA name (it is replaced by a decl-backed temporary if it were).
17433 Temporaries required by evaluating *EXPR_P may still be SSA names. */
17435 static enum gimplify_status
17436 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17437 bool (*gimple_test_f) (tree), fallback_t fallback,
17438 bool allow_ssa)
17440 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17441 gimple_test_f, fallback);
17442 if (! allow_ssa
17443 && TREE_CODE (*expr_p) == SSA_NAME)
17444 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17445 return ret;
17448 /* Look through TYPE for variable-sized objects and gimplify each such
17449 size that we find. Add to LIST_P any statements generated. */
17451 void
17452 gimplify_type_sizes (tree type, gimple_seq *list_p)
17454 if (type == NULL || type == error_mark_node)
17455 return;
17457 const bool ignored_p
17458 = TYPE_NAME (type)
17459 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17460 && DECL_IGNORED_P (TYPE_NAME (type));
17461 tree t;
17463 /* We first do the main variant, then copy into any other variants. */
17464 type = TYPE_MAIN_VARIANT (type);
17466 /* Avoid infinite recursion. */
17467 if (TYPE_SIZES_GIMPLIFIED (type))
17468 return;
17470 TYPE_SIZES_GIMPLIFIED (type) = 1;
17472 switch (TREE_CODE (type))
17474 case INTEGER_TYPE:
17475 case ENUMERAL_TYPE:
17476 case BOOLEAN_TYPE:
17477 case REAL_TYPE:
17478 case FIXED_POINT_TYPE:
17479 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17480 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17482 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17484 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17485 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17487 break;
17489 case ARRAY_TYPE:
17490 /* These types may not have declarations, so handle them here. */
17491 gimplify_type_sizes (TREE_TYPE (type), list_p);
17492 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17493 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
17494 with assigned stack slots; for -O1+ with -g they should be tracked
17495 by VTA. */
17496 if (!ignored_p
17497 && TYPE_DOMAIN (type)
17498 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17500 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17501 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17502 DECL_IGNORED_P (t) = 0;
17503 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17504 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17505 DECL_IGNORED_P (t) = 0;
17507 break;
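/* A sketch for illustration (not in the original source): for
   "int a[n];" the artificial variables behind TYPE_MIN/MAX_VALUE of
   the domain are un-ignored here, so the bounds derived from "n"
   remain inspectable in the debugger instead of vanishing.  */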
17509 case RECORD_TYPE:
17510 case UNION_TYPE:
17511 case QUAL_UNION_TYPE:
17512 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17513 if (TREE_CODE (field) == FIELD_DECL)
17515 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17516 /* Likewise, ensure variable offsets aren't removed. */
17517 if (!ignored_p
17518 && (t = DECL_FIELD_OFFSET (field))
17519 && VAR_P (t)
17520 && DECL_ARTIFICIAL (t))
17521 DECL_IGNORED_P (t) = 0;
17522 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17523 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17524 gimplify_type_sizes (TREE_TYPE (field), list_p);
17526 break;
17528 case POINTER_TYPE:
17529 case REFERENCE_TYPE:
17530 /* We used to recurse on the pointed-to type here, which turned out to
17531 be incorrect because its definition might refer to variables not
17532 yet initialized at this point if a forward declaration is involved.
17534 It was actually useful for anonymous pointed-to types to ensure
17535 that the sizes evaluation dominates every possible later use of the
17536 values. Restricting to such types here would be safe since there
17537 is no possible forward declaration around, but would introduce an
17538 undesirable middle-end semantic to anonymity. We then defer to
17539 front-ends the responsibility of ensuring that the sizes are
17540 evaluated both early and late enough, e.g. by attaching artificial
17541 type declarations to the tree. */
17542 break;
17544 default:
17545 break;
17548 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17549 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17551 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17553 TYPE_SIZE (t) = TYPE_SIZE (type);
17554 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17555 TYPE_SIZES_GIMPLIFIED (t) = 1;
17559 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17560 a size or position, has had all of its SAVE_EXPRs evaluated.
17561 We add any required statements to *STMT_P. */
17563 void
17564 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17566 tree expr = *expr_p;
17568 /* We don't do anything if the value isn't there, is constant, or contains
17569 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17570 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17571 will want to replace it with a new variable, but that will cause problems
17572 if this type is from outside the function. It's OK to have that here. */
17573 if (expr == NULL_TREE
17574 || is_gimple_constant (expr)
17575 || TREE_CODE (expr) == VAR_DECL
17576 || CONTAINS_PLACEHOLDER_P (expr))
17577 return;
17579 *expr_p = unshare_expr (expr);
17581 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17582 if the def vanishes. */
17583 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17585 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17586 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
17587 via gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17588 if (is_gimple_constant (*expr_p))
17589 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17592 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17593 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17594 is true, also gimplify the parameters. */
17596 gbind *
17597 gimplify_body (tree fndecl, bool do_parms)
17599 location_t saved_location = input_location;
17600 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17601 gimple *outer_stmt;
17602 gbind *outer_bind;
17604 timevar_push (TV_TREE_GIMPLIFY);
17606 init_tree_ssa (cfun);
17608 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17609 gimplification. */
17610 default_rtl_profile ();
17612 gcc_assert (gimplify_ctxp == NULL);
17613 push_gimplify_context (true);
17615 if (flag_openacc || flag_openmp)
17617 gcc_assert (gimplify_omp_ctxp == NULL);
17618 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17619 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17622 /* Unshare most shared trees in the body and in that of any nested functions.
17623 It would seem we don't have to do this for nested functions because
17624 they are supposed to be output and then the outer function gimplified
17625 first, but the g++ front end doesn't always do it that way. */
17626 unshare_body (fndecl);
17627 unvisit_body (fndecl);
17629 /* Make sure input_location isn't set to something weird. */
17630 input_location = DECL_SOURCE_LOCATION (fndecl);
17632 /* Resolve callee-copies. This has to be done before processing
17633 the body so that DECL_VALUE_EXPR gets processed correctly. */
17634 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17636 /* Gimplify the function's body. */
17637 seq = NULL;
17638 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17639 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17640 if (!outer_stmt)
17642 outer_stmt = gimple_build_nop ();
17643 gimplify_seq_add_stmt (&seq, outer_stmt);
17646 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17647 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17648 if (gimple_code (outer_stmt) == GIMPLE_BIND
17649 && (gimple_seq_first_nondebug_stmt (seq)
17650 == gimple_seq_last_nondebug_stmt (seq)))
17652 outer_bind = as_a <gbind *> (outer_stmt);
17653 if (gimple_seq_first_stmt (seq) != outer_stmt
17654 || gimple_seq_last_stmt (seq) != outer_stmt)
17656 /* If there are debug stmts before or after outer_stmt, move them
17657 inside of outer_bind body. */
17658 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17659 gimple_seq second_seq = NULL;
17660 if (gimple_seq_first_stmt (seq) != outer_stmt
17661 && gimple_seq_last_stmt (seq) != outer_stmt)
17663 second_seq = gsi_split_seq_after (gsi);
17664 gsi_remove (&gsi, false);
17666 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17667 gsi_remove (&gsi, false);
17668 else
17670 gsi_remove (&gsi, false);
17671 second_seq = seq;
17672 seq = NULL;
17674 gimple_seq_add_seq_without_update (&seq,
17675 gimple_bind_body (outer_bind));
17676 gimple_seq_add_seq_without_update (&seq, second_seq);
17677 gimple_bind_set_body (outer_bind, seq);
17680 else
17681 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17683 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17685 /* If we had callee-copies statements, insert them at the beginning
17686 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17687 if (!gimple_seq_empty_p (parm_stmts))
17689 tree parm;
17691 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17692 if (parm_cleanup)
17694 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17695 GIMPLE_TRY_FINALLY);
17696 parm_stmts = NULL;
17697 gimple_seq_add_stmt (&parm_stmts, g);
17699 gimple_bind_set_body (outer_bind, parm_stmts);
17701 for (parm = DECL_ARGUMENTS (current_function_decl);
17702 parm; parm = DECL_CHAIN (parm))
17703 if (DECL_HAS_VALUE_EXPR_P (parm))
17705 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17706 DECL_IGNORED_P (parm) = 0;
17710 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17711 && gimplify_omp_ctxp)
17713 delete_omp_context (gimplify_omp_ctxp);
17714 gimplify_omp_ctxp = NULL;
17717 pop_gimplify_context (outer_bind);
17718 gcc_assert (gimplify_ctxp == NULL);
17720 if (flag_checking && !seen_error ())
17721 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17723 timevar_pop (TV_TREE_GIMPLIFY);
17724 input_location = saved_location;
17726 return outer_bind;
17729 typedef char *char_p; /* For DEF_VEC_P. */
17731 /* Return whether we should exclude FNDECL from instrumentation. */
17733 static bool
17734 flag_instrument_functions_exclude_p (tree fndecl)
17736 vec<char_p> *v;
17738 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17739 if (v && v->length () > 0)
17741 const char *name;
17742 int i;
17743 char *s;
17745 name = lang_hooks.decl_printable_name (fndecl, 1);
17746 FOR_EACH_VEC_ELT (*v, i, s)
17747 if (strstr (name, s) != NULL)
17748 return true;
17751 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17752 if (v && v->length () > 0)
17754 const char *name;
17755 int i;
17756 char *s;
17758 name = DECL_SOURCE_FILE (fndecl);
17759 FOR_EACH_VEC_ELT (*v, i, s)
17760 if (strstr (name, s) != NULL)
17761 return true;
17764 return false;
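/* A sketch for illustration (not in the original source): with
     -finstrument-functions-exclude-function-list=foo,bar
   any function whose printable name contains "foo" or "bar" as a
   substring is excluded, and likewise for the file list -- the match
   is strstr-based, not exact.  */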
17767 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17768 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17769 the instrumentation function. If STMT is not NULL, it is a statement
17770 to be executed just before the call to the instrumentation function. */
17772 static void
17773 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17774 tree cond_var, gimple *stmt)
17776 /* The instrumentation hooks aren't going to call the instrumented
17777 function and the address they receive is expected to be matchable
17778 against symbol addresses. Make sure we don't create a trampoline,
17779 in case the current function is nested. */
17780 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17781 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17783 tree label_true, label_false;
17784 if (cond_var)
17786 label_true = create_artificial_label (UNKNOWN_LOCATION);
17787 label_false = create_artificial_label (UNKNOWN_LOCATION);
17788 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17789 label_true, label_false);
17790 gimplify_seq_add_stmt (seq, cond);
17791 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17792 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17793 NOT_TAKEN));
17796 if (stmt)
17797 gimplify_seq_add_stmt (seq, stmt);
17799 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17800 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17801 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17802 gimple_call_set_lhs (call, tmp_var);
17803 gimplify_seq_add_stmt (seq, call);
17804 x = builtin_decl_implicit (fncode);
17805 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17806 gimplify_seq_add_stmt (seq, call);
17808 if (cond_var)
17809 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
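/* A sketch for illustration (not in the original source): with
   -finstrument-functions-once the emitted shape is roughly
     if (tmp_called == 0)
       {
         C.0 = 1;
         return_addr = __builtin_return_address (0);
         __cyg_profile_func_enter (this_fn, return_addr);
       }
   and without it, the guard disappears and the call is emitted
   unconditionally.  */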
17812 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17813 node for the function we want to gimplify.
17815 Return the sequence of GIMPLE statements corresponding to the body
17816 of FNDECL. */
17818 void
17819 gimplify_function_tree (tree fndecl)
17821 gimple_seq seq;
17822 gbind *bind;
17824 gcc_assert (!gimple_body (fndecl));
17826 if (DECL_STRUCT_FUNCTION (fndecl))
17827 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17828 else
17829 push_struct_function (fndecl);
17831 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17832 if necessary. */
17833 cfun->curr_properties |= PROP_gimple_lva;
17835 if (asan_sanitize_use_after_scope ())
17836 asan_poisoned_variables = new hash_set<tree> ();
17837 bind = gimplify_body (fndecl, true);
17838 if (asan_poisoned_variables)
17840 delete asan_poisoned_variables;
17841 asan_poisoned_variables = NULL;
17844 /* The tree body of the function is no longer needed, replace it
17845 with the new GIMPLE body. */
17846 seq = NULL;
17847 gimple_seq_add_stmt (&seq, bind);
17848 gimple_set_body (fndecl, seq);
17850 /* If we're instrumenting function entry/exit, then prepend the call to
17851 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17852 catch the exit hook. */
17853 /* ??? Add some way to ignore exceptions for this TFE. */
17854 if (flag_instrument_function_entry_exit
17855 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
17856 /* Do not instrument extern inline functions. */
17857 && !(DECL_DECLARED_INLINE_P (fndecl)
17858 && DECL_EXTERNAL (fndecl)
17859 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
17860 && !flag_instrument_functions_exclude_p (fndecl))
17862 gimple_seq body = NULL, cleanup = NULL;
17863 gassign *assign;
17864 tree cond_var;
17866 /* If -finstrument-functions-once is specified, generate:
17868 static volatile bool C.0 = false;
17869 bool tmp_called;
17871 tmp_called = C.0;
17872 if (!tmp_called)
17874 C.0 = true;
17875 [call profiling enter function]
17878 without specific protection for data races. */
17879 if (flag_instrument_function_entry_exit > 1)
17881 tree first_var
17882 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
17883 VAR_DECL,
17884 create_tmp_var_name ("C"),
17885 boolean_type_node);
17886 DECL_ARTIFICIAL (first_var) = 1;
17887 DECL_IGNORED_P (first_var) = 1;
17888 TREE_STATIC (first_var) = 1;
17889 TREE_THIS_VOLATILE (first_var) = 1;
17890 TREE_USED (first_var) = 1;
17891 DECL_INITIAL (first_var) = boolean_false_node;
17892 varpool_node::add (first_var);
17894 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
17895 assign = gimple_build_assign (cond_var, first_var);
17896 gimplify_seq_add_stmt (&body, assign);
17898 assign = gimple_build_assign (first_var, boolean_true_node);
17901 else
17903 cond_var = NULL_TREE;
17904 assign = NULL;
17907 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
17908 cond_var, assign);
17910 /* If -finstrument-functions-once is specified, generate:
17912 if (!tmp_called)
17913 [call profiling exit function]
17915 without specific protection for data races. */
17916 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
17917 cond_var, NULL);
17919 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
17920 gimplify_seq_add_stmt (&body, tf);
17921 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
17923 /* Replace the current function body with the body
17924 wrapped in the try/finally TF. */
17925 seq = NULL;
17926 gimple_seq_add_stmt (&seq, new_bind);
17927 gimple_set_body (fndecl, seq);
17928 bind = new_bind;
17931 if (sanitize_flags_p (SANITIZE_THREAD)
17932 && param_tsan_instrument_func_entry_exit)
17934 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
17935 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
17936 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
17937 /* Replace the current function body with the body
17938 wrapped in the try/finally TF. */
17939 seq = NULL;
17940 gimple_seq_add_stmt (&seq, new_bind);
17941 gimple_set_body (fndecl, seq);
17944 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17945 cfun->curr_properties |= PROP_gimple_any;
17947 pop_cfun ();
17949 dump_function (TDI_gimple, fndecl);
17952 /* Return a dummy expression of type TYPE in order to keep going after an
17953 error. */
17955 static tree
17956 dummy_object (tree type)
17958 tree t = build_int_cst (build_pointer_type (type), 0);
17959 return build2 (MEM_REF, type, t, t);
17962 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
17963 builtin function, but a very special sort of operator. */
17965 enum gimplify_status
17966 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
17967 gimple_seq *post_p ATTRIBUTE_UNUSED)
17969 tree promoted_type, have_va_type;
17970 tree valist = TREE_OPERAND (*expr_p, 0);
17971 tree type = TREE_TYPE (*expr_p);
17972 tree t, tag, aptag;
17973 location_t loc = EXPR_LOCATION (*expr_p);
17975 /* Verify that valist is of the proper type. */
17976 have_va_type = TREE_TYPE (valist);
17977 if (have_va_type == error_mark_node)
17978 return GS_ERROR;
17979 have_va_type = targetm.canonical_va_list_type (have_va_type);
17980 if (have_va_type == NULL_TREE
17981 && POINTER_TYPE_P (TREE_TYPE (valist)))
17982 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
17983 have_va_type
17984 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
17985 gcc_assert (have_va_type != NULL_TREE);
17987 /* Generate a diagnostic for requesting data of a type that cannot
17988 be passed through `...' due to type promotion at the call site. */
17989 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
17990 != type)
17992 static bool gave_help;
17993 bool warned;
17994 /* Use the expansion point to handle cases such as passing bool (defined
17995 in a system header) through `...'. */
17996 location_t xloc
17997 = expansion_point_location_if_in_system_header (loc);
17999 /* Unfortunately, this is merely undefined, rather than a constraint
18000 violation, so we cannot make this an error. If this call is never
18001 executed, the program is still strictly conforming. */
18002 auto_diagnostic_group d;
18003 warned = warning_at (xloc, 0,
18004 "%qT is promoted to %qT when passed through %<...%>",
18005 type, promoted_type);
18006 if (!gave_help && warned)
18008 gave_help = true;
18009 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
18010 promoted_type, type);
18013 /* We can, however, treat "undefined" any way we please.
18014 Emit a trap (which aborts) to encourage the user to fix the program. */
18015 if (warned)
18016 inform (xloc, "if this code is reached, the program will abort");
18017 /* Before the abort, allow the evaluation of the va_list
18018 expression to exit or longjmp. */
18019 gimplify_and_add (valist, pre_p);
18020 t = build_call_expr_loc (loc,
18021 builtin_decl_implicit (BUILT_IN_TRAP), 0);
18022 gimplify_and_add (t, pre_p);
18024 /* This is dead code, but go ahead and finish so that the
18025 mode of the result comes out right. */
18026 *expr_p = dummy_object (type);
18027 return GS_ALL_DONE;
18030 tag = build_int_cst (build_pointer_type (type), 0);
18031 aptag = build_int_cst (TREE_TYPE (valist), 0);
18033 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
18034 valist, tag, aptag);
18036 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18037 needs to be expanded. */
18038 cfun->curr_properties &= ~PROP_gimple_lva;
18040 return GS_OK;
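/* A sketch for illustration (not in the original source):
   "va_arg (ap, double)" thus leaves gimplification as an internal
   .VA_ARG (ap, tag, aptag) call whose two trailing null-pointer
   arguments merely encode types; a later stdarg lowering pass expands
   it, which is why PROP_gimple_lva is cleared here.  */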
18043 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18045 DST/SRC are the destination and source respectively. You can pass
18046 ungimplified trees in DST or SRC, in which case they will be
18047 converted to a gimple operand if necessary.
18049 This function returns the newly created GIMPLE_ASSIGN tuple. */
18051 gimple *
18052 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18054 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18055 gimplify_and_add (t, seq_p);
18056 ggc_free (t);
18057 return gimple_seq_last_stmt (*seq_p);
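/* A minimal usage sketch (not in the original source):

     gimple_seq seq = NULL;
     gimple *g = gimplify_assign (dst, src, &seq);

   DST and SRC may still be GENERIC; any statements needed to
   gimplify them land in SEQ ahead of the final assignment, whose
   tuple is returned.  */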
18060 inline hashval_t
18061 gimplify_hasher::hash (const elt_t *p)
18063 tree t = p->val;
18064 return iterative_hash_expr (t, 0);
18067 inline bool
18068 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18070 tree t1 = p1->val;
18071 tree t2 = p2->val;
18072 enum tree_code code = TREE_CODE (t1);
18074 if (TREE_CODE (t2) != code
18075 || TREE_TYPE (t1) != TREE_TYPE (t2))
18076 return false;
18078 if (!operand_equal_p (t1, t2, 0))
18079 return false;
18081 /* Only allow them to compare equal if they also hash equal; otherwise
18082 results are nondeterministic, and we fail bootstrap comparison. */
18083 gcc_checking_assert (hash (p1) == hash (p2));
18085 return true;