testsuite: remove SPE tests.
[official-gcc.git] / gcc / gimplify.c
blob968fae2f1a3cba5699f2a12875181d56b4550e05
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68 #include "omp-offload.h"
69 #include "context.h"
71 /* Hash set of poisoned variables in a bind expr. */
72 static hash_set<tree> *asan_poisoned_variables = NULL;
74 enum gimplify_omp_var_data
76 GOVD_SEEN = 0x000001,
77 GOVD_EXPLICIT = 0x000002,
78 GOVD_SHARED = 0x000004,
79 GOVD_PRIVATE = 0x000008,
80 GOVD_FIRSTPRIVATE = 0x000010,
81 GOVD_LASTPRIVATE = 0x000020,
82 GOVD_REDUCTION = 0x000040,
83 GOVD_LOCAL = 0x00080,
84 GOVD_MAP = 0x000100,
85 GOVD_DEBUG_PRIVATE = 0x000200,
86 GOVD_PRIVATE_OUTER_REF = 0x000400,
87 GOVD_LINEAR = 0x000800,
88 GOVD_ALIGNED = 0x001000,
90 /* Flag for GOVD_MAP: don't copy back. */
91 GOVD_MAP_TO_ONLY = 0x002000,
93 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
94 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,
96 GOVD_MAP_0LEN_ARRAY = 0x008000,
98 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
99 GOVD_MAP_ALWAYS_TO = 0x010000,
101 /* Flag for shared vars that are or might be stored to in the region. */
102 GOVD_WRITTEN = 0x020000,
104 /* Flag for GOVD_MAP, if it is a forced mapping. */
105 GOVD_MAP_FORCE = 0x040000,
107 /* Flag for GOVD_MAP: must be present already. */
108 GOVD_MAP_FORCE_PRESENT = 0x080000,
110 /* Flag for GOVD_MAP: only allocate. */
111 GOVD_MAP_ALLOC_ONLY = 0x100000,
113 /* Flag for GOVD_MAP: only copy back. */
114 GOVD_MAP_FROM_ONLY = 0x200000,
116 GOVD_NONTEMPORAL = 0x400000,
118 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
119 GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,
121 GOVD_CONDTEMP = 0x1000000,
123 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
124 GOVD_REDUCTION_INSCAN = 0x2000000,
126 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
127 fields. */
128 GOVD_MAP_HAS_ATTACHMENTS = 8388608,
130 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
131 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
132 | GOVD_LOCAL)
136 enum omp_region_type
138 ORT_WORKSHARE = 0x00,
139 ORT_TASKGROUP = 0x01,
140 ORT_SIMD = 0x04,
142 ORT_PARALLEL = 0x08,
143 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
145 ORT_TASK = 0x10,
146 ORT_UNTIED_TASK = ORT_TASK | 1,
147 ORT_TASKLOOP = ORT_TASK | 2,
148 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
150 ORT_TEAMS = 0x20,
151 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
152 ORT_HOST_TEAMS = ORT_TEAMS | 2,
153 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
155 /* Data region. */
156 ORT_TARGET_DATA = 0x40,
158 /* Data region with offloading. */
159 ORT_TARGET = 0x80,
160 ORT_COMBINED_TARGET = ORT_TARGET | 1,
161 ORT_IMPLICIT_TARGET = ORT_TARGET | 2,
163 /* OpenACC variants. */
164 ORT_ACC = 0x100, /* A generic OpenACC region. */
165 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
166 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
167 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
168 ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4, /* Serial construct. */
169 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
171 /* Dummy OpenMP region, used to disable expansion of
172 DECL_VALUE_EXPRs in taskloop pre body. */
173 ORT_NONE = 0x200
176 /* Gimplify hashtable helper. */
178 struct gimplify_hasher : free_ptr_hash <elt_t>
180 static inline hashval_t hash (const elt_t *);
181 static inline bool equal (const elt_t *, const elt_t *);
184 struct gimplify_ctx
186 struct gimplify_ctx *prev_context;
188 vec<gbind *> bind_expr_stack;
189 tree temps;
190 gimple_seq conditional_cleanups;
191 tree exit_label;
192 tree return_temp;
194 vec<tree> case_labels;
195 hash_set<tree> *live_switch_vars;
196 /* The formal temporary table. Should this be persistent? */
197 hash_table<gimplify_hasher> *temp_htab;
199 int conditions;
200 unsigned into_ssa : 1;
201 unsigned allow_rhs_cond_expr : 1;
202 unsigned in_cleanup_point_expr : 1;
203 unsigned keep_stack : 1;
204 unsigned save_stack : 1;
205 unsigned in_switch_expr : 1;
208 enum gimplify_defaultmap_kind
210 GDMK_SCALAR,
211 GDMK_AGGREGATE,
212 GDMK_ALLOCATABLE,
213 GDMK_POINTER
216 struct gimplify_omp_ctx
218 struct gimplify_omp_ctx *outer_context;
219 splay_tree variables;
220 hash_set<tree> *privatized_types;
221 tree clauses;
222 /* Iteration variables in an OMP_FOR. */
223 vec<tree> loop_iter_var;
224 location_t location;
225 enum omp_clause_default_kind default_kind;
226 enum omp_region_type region_type;
227 enum tree_code code;
228 bool combined_loop;
229 bool distribute;
230 bool target_firstprivatize_array_bases;
231 bool add_safelen1;
232 bool order_concurrent;
233 int defaultmap[4];
236 static struct gimplify_ctx *gimplify_ctxp;
237 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
238 static bool in_omp_construct;
240 /* Forward declaration. */
241 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
242 static hash_map<tree, tree> *oacc_declare_returns;
243 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
244 bool (*) (tree), fallback_t, bool);
246 /* Shorter alias name for the above function for use in gimplify.c
247 only. */
249 static inline void
250 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
252 gimple_seq_add_stmt_without_update (seq_p, gs);
255 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
256 NULL, a new sequence is allocated. This function is
257 similar to gimple_seq_add_seq, but does not scan the operands.
258 During gimplification, we need to manipulate statement sequences
259 before the def/use vectors have been constructed. */
261 static void
262 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
264 gimple_stmt_iterator si;
266 if (src == NULL)
267 return;
269 si = gsi_last (*dst_p);
270 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
274 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
275 and popping gimplify contexts. */
277 static struct gimplify_ctx *ctx_pool = NULL;
279 /* Return a gimplify context struct from the pool. */
281 static inline struct gimplify_ctx *
282 ctx_alloc (void)
284 struct gimplify_ctx * c = ctx_pool;
286 if (c)
287 ctx_pool = c->prev_context;
288 else
289 c = XNEW (struct gimplify_ctx);
291 memset (c, '\0', sizeof (*c));
292 return c;
295 /* Put gimplify context C back into the pool. */
297 static inline void
298 ctx_free (struct gimplify_ctx *c)
300 c->prev_context = ctx_pool;
301 ctx_pool = c;
304 /* Free allocated ctx stack memory. */
306 void
307 free_gimplify_stack (void)
309 struct gimplify_ctx *c;
311 while ((c = ctx_pool))
313 ctx_pool = c->prev_context;
314 free (c);
319 /* Set up a context for the gimplifier. */
321 void
322 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
324 struct gimplify_ctx *c = ctx_alloc ();
326 c->prev_context = gimplify_ctxp;
327 gimplify_ctxp = c;
328 gimplify_ctxp->into_ssa = in_ssa;
329 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
332 /* Tear down a context for the gimplifier. If BODY is non-null, then
333 put the temporaries into the outer BIND_EXPR. Otherwise, put them
334 in the local_decls.
336 BODY is not a sequence, but the first tuple in a sequence. */
338 void
339 pop_gimplify_context (gimple *body)
341 struct gimplify_ctx *c = gimplify_ctxp;
343 gcc_assert (c
344 && (!c->bind_expr_stack.exists ()
345 || c->bind_expr_stack.is_empty ()));
346 c->bind_expr_stack.release ();
347 gimplify_ctxp = c->prev_context;
349 if (body)
350 declare_vars (c->temps, body, false);
351 else
352 record_vars (c->temps);
354 delete c->temp_htab;
355 c->temp_htab = NULL;
356 ctx_free (c);
359 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
361 static void
362 gimple_push_bind_expr (gbind *bind_stmt)
364 gimplify_ctxp->bind_expr_stack.reserve (8);
365 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
368 /* Pop the first element off the stack of bindings. */
370 static void
371 gimple_pop_bind_expr (void)
373 gimplify_ctxp->bind_expr_stack.pop ();
376 /* Return the first element of the stack of bindings. */
378 gbind *
379 gimple_current_bind_expr (void)
381 return gimplify_ctxp->bind_expr_stack.last ();
384 /* Return the stack of bindings created during gimplification. */
386 vec<gbind *>
387 gimple_bind_expr_stack (void)
389 return gimplify_ctxp->bind_expr_stack;
392 /* Return true iff there is a COND_EXPR between us and the innermost
393 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
395 static bool
396 gimple_conditional_context (void)
398 return gimplify_ctxp->conditions > 0;
401 /* Note that we've entered a COND_EXPR. */
403 static void
404 gimple_push_condition (void)
406 #ifdef ENABLE_GIMPLE_CHECKING
407 if (gimplify_ctxp->conditions == 0)
408 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
409 #endif
410 ++(gimplify_ctxp->conditions);
413 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
414 now, add any conditional cleanups we've seen to the prequeue. */
416 static void
417 gimple_pop_condition (gimple_seq *pre_p)
419 int conds = --(gimplify_ctxp->conditions);
421 gcc_assert (conds >= 0);
422 if (conds == 0)
424 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
425 gimplify_ctxp->conditional_cleanups = NULL;
429 /* A stable comparison routine for use with splay trees and DECLs. */
431 static int
432 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
434 tree a = (tree) xa;
435 tree b = (tree) xb;
437 return DECL_UID (a) - DECL_UID (b);
440 /* Create a new omp construct that deals with variable remapping. */
442 static struct gimplify_omp_ctx *
443 new_omp_context (enum omp_region_type region_type)
445 struct gimplify_omp_ctx *c;
447 c = XCNEW (struct gimplify_omp_ctx);
448 c->outer_context = gimplify_omp_ctxp;
449 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
450 c->privatized_types = new hash_set<tree>;
451 c->location = input_location;
452 c->region_type = region_type;
453 if ((region_type & ORT_TASK) == 0)
454 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
455 else
456 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
457 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
458 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
459 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
460 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
462 return c;
465 /* Destroy an omp construct that deals with variable remapping. */
467 static void
468 delete_omp_context (struct gimplify_omp_ctx *c)
470 splay_tree_delete (c->variables);
471 delete c->privatized_types;
472 c->loop_iter_var.release ();
473 XDELETE (c);
476 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
477 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
479 /* Both gimplify the statement T and append it to *SEQ_P. This function
480 behaves exactly as gimplify_stmt, but you don't have to pass T as a
481 reference. */
483 void
484 gimplify_and_add (tree t, gimple_seq *seq_p)
486 gimplify_stmt (&t, seq_p);
489 /* Gimplify statement T into sequence *SEQ_P, and return the first
490 tuple in the sequence of generated tuples for this statement.
491 Return NULL if gimplifying T produced no tuples. */
493 static gimple *
494 gimplify_and_return_first (tree t, gimple_seq *seq_p)
496 gimple_stmt_iterator last = gsi_last (*seq_p);
498 gimplify_and_add (t, seq_p);
500 if (!gsi_end_p (last))
502 gsi_next (&last);
503 return gsi_stmt (last);
505 else
506 return gimple_seq_first_stmt (*seq_p);
509 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
510 LHS, or for a call argument. */
512 static bool
513 is_gimple_mem_rhs (tree t)
515 /* If we're dealing with a renamable type, either source or dest must be
516 a renamed variable. */
517 if (is_gimple_reg_type (TREE_TYPE (t)))
518 return is_gimple_val (t);
519 else
520 return is_gimple_val (t) || is_gimple_lvalue (t);
523 /* Return true if T is a CALL_EXPR or an expression that can be
524 assigned to a temporary. Note that this predicate should only be
525 used during gimplification. See the rationale for this in
526 gimplify_modify_expr. */
528 static bool
529 is_gimple_reg_rhs_or_call (tree t)
531 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
532 || TREE_CODE (t) == CALL_EXPR);
535 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
536 this predicate should only be used during gimplification. See the
537 rationale for this in gimplify_modify_expr. */
539 static bool
540 is_gimple_mem_rhs_or_call (tree t)
542 /* If we're dealing with a renamable type, either source or dest must be
543 a renamed variable. */
544 if (is_gimple_reg_type (TREE_TYPE (t)))
545 return is_gimple_val (t);
546 else
547 return (is_gimple_val (t)
548 || is_gimple_lvalue (t)
549 || TREE_CLOBBER_P (t)
550 || TREE_CODE (t) == CALL_EXPR);
553 /* Create a temporary with a name derived from VAL. Subroutine of
554 lookup_tmp_var; nobody else should call this function. */
556 static inline tree
557 create_tmp_from_val (tree val)
559 /* Drop all qualifiers and address-space information from the value type. */
560 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
561 tree var = create_tmp_var (type, get_name (val));
562 return var;
565 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
566 an existing expression temporary. */
568 static tree
569 lookup_tmp_var (tree val, bool is_formal)
571 tree ret;
573 /* If not optimizing, never really reuse a temporary. local-alloc
574 won't allocate any variable that is used in more than one basic
575 block, which means it will go into memory, causing much extra
576 work in reload and final and poorer code generation, outweighing
577 the extra memory allocation here. */
578 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
579 ret = create_tmp_from_val (val);
580 else
582 elt_t elt, *elt_p;
583 elt_t **slot;
585 elt.val = val;
586 if (!gimplify_ctxp->temp_htab)
587 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
588 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
589 if (*slot == NULL)
591 elt_p = XNEW (elt_t);
592 elt_p->val = val;
593 elt_p->temp = ret = create_tmp_from_val (val);
594 *slot = elt_p;
596 else
598 elt_p = *slot;
599 ret = elt_p->temp;
603 return ret;
606 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
608 static tree
609 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
610 bool is_formal, bool allow_ssa)
612 tree t, mod;
614 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
615 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
616 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
617 fb_rvalue);
619 if (allow_ssa
620 && gimplify_ctxp->into_ssa
621 && is_gimple_reg_type (TREE_TYPE (val)))
623 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
624 if (! gimple_in_ssa_p (cfun))
626 const char *name = get_name (val);
627 if (name)
628 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
631 else
632 t = lookup_tmp_var (val, is_formal);
634 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
636 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
638 /* gimplify_modify_expr might want to reduce this further. */
639 gimplify_and_add (mod, pre_p);
640 ggc_free (mod);
642 return t;
645 /* Return a formal temporary variable initialized with VAL. PRE_P is as
646 in gimplify_expr. Only use this function if:
648 1) The value of the unfactored expression represented by VAL will not
649 change between the initialization and use of the temporary, and
650 2) The temporary will not be otherwise modified.
652 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
653 and #2 means it is inappropriate for && temps.
655 For other cases, use get_initialized_tmp_var instead. */
657 tree
658 get_formal_tmp_var (tree val, gimple_seq *pre_p)
660 return internal_get_tmp_var (val, pre_p, NULL, true, true);
663 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
664 are as in gimplify_expr. */
666 tree
667 get_initialized_tmp_var (tree val, gimple_seq *pre_p,
668 gimple_seq *post_p /* = NULL */,
669 bool allow_ssa /* = true */)
671 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
674 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
675 generate debug info for them; otherwise don't. */
677 void
678 declare_vars (tree vars, gimple *gs, bool debug_info)
680 tree last = vars;
681 if (last)
683 tree temps, block;
685 gbind *scope = as_a <gbind *> (gs);
687 temps = nreverse (last);
689 block = gimple_bind_block (scope);
690 gcc_assert (!block || TREE_CODE (block) == BLOCK);
691 if (!block || !debug_info)
693 DECL_CHAIN (last) = gimple_bind_vars (scope);
694 gimple_bind_set_vars (scope, temps);
696 else
698 /* We need to attach the nodes both to the BIND_EXPR and to its
699 associated BLOCK for debugging purposes. The key point here
700 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
701 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
702 if (BLOCK_VARS (block))
703 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
704 else
706 gimple_bind_set_vars (scope,
707 chainon (gimple_bind_vars (scope), temps));
708 BLOCK_VARS (block) = temps;
714 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
715 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
716 no such upper bound can be obtained. */
718 static void
719 force_constant_size (tree var)
721 /* The only attempt we make is by querying the maximum size of objects
722 of the variable's type. */
724 HOST_WIDE_INT max_size;
726 gcc_assert (VAR_P (var));
728 max_size = max_int_size_in_bytes (TREE_TYPE (var));
730 gcc_assert (max_size >= 0);
732 DECL_SIZE_UNIT (var)
733 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
734 DECL_SIZE (var)
735 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
738 /* Push the temporary variable TMP into the current binding. */
740 void
741 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
743 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
749 force_constant_size (tmp);
751 DECL_CONTEXT (tmp) = fn->decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
754 record_vars_into (tmp, fn->decl);
757 /* Push the temporary variable TMP into the current binding. */
759 void
760 gimple_add_tmp_var (tree tmp)
762 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
764 /* Later processing assumes that the object size is constant, which might
765 not be true at this point. Force the use of a constant upper bound in
766 this case. */
767 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
768 force_constant_size (tmp);
770 DECL_CONTEXT (tmp) = current_function_decl;
771 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
773 if (gimplify_ctxp)
775 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
776 gimplify_ctxp->temps = tmp;
778 /* Mark temporaries local within the nearest enclosing parallel. */
779 if (gimplify_omp_ctxp)
781 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
782 int flag = GOVD_LOCAL;
783 while (ctx
784 && (ctx->region_type == ORT_WORKSHARE
785 || ctx->region_type == ORT_TASKGROUP
786 || ctx->region_type == ORT_SIMD
787 || ctx->region_type == ORT_ACC))
789 if (ctx->region_type == ORT_SIMD
790 && TREE_ADDRESSABLE (tmp)
791 && !TREE_STATIC (tmp))
793 if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
794 ctx->add_safelen1 = true;
795 else
796 flag = GOVD_PRIVATE;
797 break;
799 ctx = ctx->outer_context;
801 if (ctx)
802 omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
805 else if (cfun)
806 record_vars (tmp);
807 else
809 gimple_seq body_seq;
811 /* This case is for nested functions. We need to expose the locals
812 they create. */
813 body_seq = gimple_body (current_function_decl);
814 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
820 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
821 nodes that are referenced more than once in GENERIC functions. This is
822 necessary because gimplification (translation into GIMPLE) is performed
823 by modifying tree nodes in-place, so gimplication of a shared node in a
824 first context could generate an invalid GIMPLE form in a second context.
826 This is achieved with a simple mark/copy/unmark algorithm that walks the
827 GENERIC representation top-down, marks nodes with TREE_VISITED the first
828 time it encounters them, duplicates them if they already have TREE_VISITED
829 set, and finally removes the TREE_VISITED marks it has set.
831 The algorithm works only at the function level, i.e. it generates a GENERIC
832 representation of a function with no nodes shared within the function when
833 passed a GENERIC function (except for nodes that are allowed to be shared).
835 At the global level, it is also necessary to unshare tree nodes that are
836 referenced in more than one function, for the same aforementioned reason.
837 This requires some cooperation from the front-end. There are 2 strategies:
839 1. Manual unsharing. The front-end needs to call unshare_expr on every
840 expression that might end up being shared across functions.
842 2. Deep unsharing. This is an extension of regular unsharing. Instead
843 of calling unshare_expr on expressions that might be shared across
844 functions, the front-end pre-marks them with TREE_VISITED. This will
845 ensure that they are unshared on the first reference within functions
846 when the regular unsharing algorithm runs. The counterpart is that
847 this algorithm must look deeper than for manual unsharing, which is
848 specified by LANG_HOOKS_DEEP_UNSHARING.
850 If there are only few specific cases of node sharing across functions, it is
851 probably easier for a front-end to unshare the expressions manually. On the
852 contrary, if the expressions generated at the global level are as widespread
853 as expressions generated within functions, deep unsharing is very likely the
854 way to go. */
856 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
857 These nodes model computations that must be done once. If we were to
858 unshare something like SAVE_EXPR(i++), the gimplification process would
859 create wrong code. However, if DATA is non-null, it must hold a pointer
860 set that is used to unshare the subtrees of these nodes. */
862 static tree
863 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
865 tree t = *tp;
866 enum tree_code code = TREE_CODE (t);
868 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
869 copy their subtrees if we can make sure to do it only once. */
870 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
872 if (data && !((hash_set<tree> *)data)->add (t))
874 else
875 *walk_subtrees = 0;
878 /* Stop at types, decls, constants like copy_tree_r. */
879 else if (TREE_CODE_CLASS (code) == tcc_type
880 || TREE_CODE_CLASS (code) == tcc_declaration
881 || TREE_CODE_CLASS (code) == tcc_constant)
882 *walk_subtrees = 0;
884 /* Cope with the statement expression extension. */
885 else if (code == STATEMENT_LIST)
888 /* Leave the bulk of the work to copy_tree_r itself. */
889 else
890 copy_tree_r (tp, walk_subtrees, NULL);
892 return NULL_TREE;
895 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
896 If *TP has been visited already, then *TP is deeply copied by calling
897 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
899 static tree
900 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
902 tree t = *tp;
903 enum tree_code code = TREE_CODE (t);
905 /* Skip types, decls, and constants. But we do want to look at their
906 types and the bounds of types. Mark them as visited so we properly
907 unmark their subtrees on the unmark pass. If we've already seen them,
908 don't look down further. */
909 if (TREE_CODE_CLASS (code) == tcc_type
910 || TREE_CODE_CLASS (code) == tcc_declaration
911 || TREE_CODE_CLASS (code) == tcc_constant)
913 if (TREE_VISITED (t))
914 *walk_subtrees = 0;
915 else
916 TREE_VISITED (t) = 1;
919 /* If this node has been visited already, unshare it and don't look
920 any deeper. */
921 else if (TREE_VISITED (t))
923 walk_tree (tp, mostly_copy_tree_r, data, NULL);
924 *walk_subtrees = 0;
927 /* Otherwise, mark the node as visited and keep looking. */
928 else
929 TREE_VISITED (t) = 1;
931 return NULL_TREE;
934 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
935 copy_if_shared_r callback unmodified. */
937 void
938 copy_if_shared (tree *tp, void *data)
940 walk_tree (tp, copy_if_shared_r, data, NULL);
943 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
944 any nested functions. */
946 static void
947 unshare_body (tree fndecl)
949 struct cgraph_node *cgn = cgraph_node::get (fndecl);
950 /* If the language requires deep unsharing, we need a pointer set to make
951 sure we don't repeatedly unshare subtrees of unshareable nodes. */
952 hash_set<tree> *visited
953 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
955 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
956 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
957 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
959 delete visited;
961 if (cgn)
962 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
963 unshare_body (cgn->decl);
966 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
967 Subtrees are walked until the first unvisited node is encountered. */
969 static tree
970 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
972 tree t = *tp;
974 /* If this node has been visited, unmark it and keep looking. */
975 if (TREE_VISITED (t))
976 TREE_VISITED (t) = 0;
978 /* Otherwise, don't look any deeper. */
979 else
980 *walk_subtrees = 0;
982 return NULL_TREE;
985 /* Unmark the visited trees rooted at *TP. */
987 static inline void
988 unmark_visited (tree *tp)
990 walk_tree (tp, unmark_visited_r, NULL, NULL);
993 /* Likewise, but mark all trees as not visited. */
995 static void
996 unvisit_body (tree fndecl)
998 struct cgraph_node *cgn = cgraph_node::get (fndecl);
1000 unmark_visited (&DECL_SAVED_TREE (fndecl));
1001 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1002 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1004 if (cgn)
1005 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1006 unvisit_body (cgn->decl);
1009 /* Unconditionally make an unshared copy of EXPR. This is used when using
1010 stored expressions which span multiple functions, such as BINFO_VTABLE,
1011 as the normal unsharing process can't tell that they're shared. */
1013 tree
1014 unshare_expr (tree expr)
1016 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1017 return expr;
1020 /* Worker for unshare_expr_without_location. */
1022 static tree
1023 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1025 if (EXPR_P (*tp))
1026 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1027 else
1028 *walk_subtrees = 0;
1029 return NULL_TREE;
1032 /* Similar to unshare_expr but also prune all expression locations
1033 from EXPR. */
1035 tree
1036 unshare_expr_without_location (tree expr)
1038 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1039 if (EXPR_P (expr))
1040 walk_tree (&expr, prune_expr_location, NULL, NULL);
1041 return expr;
1044 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1045 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1046 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1047 EXPR is the location of the EXPR. */
1049 static location_t
1050 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1052 if (!expr)
1053 return or_else;
1055 if (EXPR_HAS_LOCATION (expr))
1056 return EXPR_LOCATION (expr);
1058 if (TREE_CODE (expr) != STATEMENT_LIST)
1059 return or_else;
1061 tree_stmt_iterator i = tsi_start (expr);
1063 bool found = false;
1064 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1066 found = true;
1067 tsi_next (&i);
1070 if (!found || !tsi_one_before_end_p (i))
1071 return or_else;
1073 return rexpr_location (tsi_stmt (i), or_else);
1076 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1077 rexpr_location for the potential recursion. */
1079 static inline bool
1080 rexpr_has_location (tree expr)
1082 return rexpr_location (expr) != UNKNOWN_LOCATION;
1086 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1087 contain statements and have a value. Assign its value to a temporary
1088 and give it void_type_node. Return the temporary, or NULL_TREE if
1089 WRAPPER was already void. */
1091 tree
1092 voidify_wrapper_expr (tree wrapper, tree temp)
1094 tree type = TREE_TYPE (wrapper);
1095 if (type && !VOID_TYPE_P (type))
1097 tree *p;
1099 /* Set p to point to the body of the wrapper. Loop until we find
1100 something that isn't a wrapper. */
1101 for (p = &wrapper; p && *p; )
1103 switch (TREE_CODE (*p))
1105 case BIND_EXPR:
1106 TREE_SIDE_EFFECTS (*p) = 1;
1107 TREE_TYPE (*p) = void_type_node;
1108 /* For a BIND_EXPR, the body is operand 1. */
1109 p = &BIND_EXPR_BODY (*p);
1110 break;
1112 case CLEANUP_POINT_EXPR:
1113 case TRY_FINALLY_EXPR:
1114 case TRY_CATCH_EXPR:
1115 TREE_SIDE_EFFECTS (*p) = 1;
1116 TREE_TYPE (*p) = void_type_node;
1117 p = &TREE_OPERAND (*p, 0);
1118 break;
1120 case STATEMENT_LIST:
1122 tree_stmt_iterator i = tsi_last (*p);
1123 TREE_SIDE_EFFECTS (*p) = 1;
1124 TREE_TYPE (*p) = void_type_node;
1125 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1127 break;
1129 case COMPOUND_EXPR:
1130 /* Advance to the last statement. Set all container types to
1131 void. */
1132 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1134 TREE_SIDE_EFFECTS (*p) = 1;
1135 TREE_TYPE (*p) = void_type_node;
1137 break;
1139 case TRANSACTION_EXPR:
1140 TREE_SIDE_EFFECTS (*p) = 1;
1141 TREE_TYPE (*p) = void_type_node;
1142 p = &TRANSACTION_EXPR_BODY (*p);
1143 break;
1145 default:
1146 /* Assume that any tree upon which voidify_wrapper_expr is
1147 directly called is a wrapper, and that its body is op0. */
1148 if (p == &wrapper)
1150 TREE_SIDE_EFFECTS (*p) = 1;
1151 TREE_TYPE (*p) = void_type_node;
1152 p = &TREE_OPERAND (*p, 0);
1153 break;
1155 goto out;
1159 out:
1160 if (p == NULL || IS_EMPTY_STMT (*p))
1161 temp = NULL_TREE;
1162 else if (temp)
1164 /* The wrapper is on the RHS of an assignment that we're pushing
1165 down. */
1166 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1167 || TREE_CODE (temp) == MODIFY_EXPR);
1168 TREE_OPERAND (temp, 1) = *p;
1169 *p = temp;
1171 else
1173 temp = create_tmp_var (type, "retval");
1174 *p = build2 (INIT_EXPR, type, temp, *p);
1177 return temp;
1180 return NULL_TREE;
1183 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1184 a temporary through which they communicate. */
1186 static void
1187 build_stack_save_restore (gcall **save, gcall **restore)
1189 tree tmp_var;
1191 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1192 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1193 gimple_call_set_lhs (*save, tmp_var);
1195 *restore
1196 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1197 1, tmp_var);
1200 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1202 static tree
1203 build_asan_poison_call_expr (tree decl)
1205 /* Do not poison variables that have size equal to zero. */
1206 tree unit_size = DECL_SIZE_UNIT (decl);
1207 if (zerop (unit_size))
1208 return NULL_TREE;
1210 tree base = build_fold_addr_expr (decl);
1212 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1213 void_type_node, 3,
1214 build_int_cst (integer_type_node,
1215 ASAN_MARK_POISON),
1216 base, unit_size);
1219 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1220 on POISON flag, shadow memory of a DECL variable. The call will be
1221 put on location identified by IT iterator, where BEFORE flag drives
1222 position where the stmt will be put. */
1224 static void
1225 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1226 bool before)
1228 tree unit_size = DECL_SIZE_UNIT (decl);
1229 tree base = build_fold_addr_expr (decl);
1231 /* Do not poison variables that have size equal to zero. */
1232 if (zerop (unit_size))
1233 return;
1235 /* It's necessary to have all stack variables aligned to ASAN granularity
1236 bytes. */
1237 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1238 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1240 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1242 gimple *g
1243 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1244 build_int_cst (integer_type_node, flags),
1245 base, unit_size);
1247 if (before)
1248 gsi_insert_before (it, g, GSI_NEW_STMT);
1249 else
1250 gsi_insert_after (it, g, GSI_NEW_STMT);
1253 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1254 either poisons or unpoisons a DECL. Created statement is appended
1255 to SEQ_P gimple sequence. */
1257 static void
1258 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1260 gimple_stmt_iterator it = gsi_last (*seq_p);
1261 bool before = false;
1263 if (gsi_end_p (it))
1264 before = true;
1266 asan_poison_variable (decl, poison, &it, before);
1269 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1271 static int
1272 sort_by_decl_uid (const void *a, const void *b)
1274 const tree *t1 = (const tree *)a;
1275 const tree *t2 = (const tree *)b;
1277 int uid1 = DECL_UID (*t1);
1278 int uid2 = DECL_UID (*t2);
1280 if (uid1 < uid2)
1281 return -1;
1282 else if (uid1 > uid2)
1283 return 1;
1284 else
1285 return 0;
1288 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1289 depending on POISON flag. Created statement is appended
1290 to SEQ_P gimple sequence. */
1292 static void
1293 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1295 unsigned c = variables->elements ();
1296 if (c == 0)
1297 return;
1299 auto_vec<tree> sorted_variables (c);
1301 for (hash_set<tree>::iterator it = variables->begin ();
1302 it != variables->end (); ++it)
1303 sorted_variables.safe_push (*it);
1305 sorted_variables.qsort (sort_by_decl_uid);
1307 unsigned i;
1308 tree var;
1309 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1311 asan_poison_variable (var, poison, seq_p);
1313 /* Add use_after_scope_memory attribute for the variable in order
1314 to prevent re-written into SSA. */
1315 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1316 DECL_ATTRIBUTES (var)))
1317 DECL_ATTRIBUTES (var)
1318 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1319 integer_one_node,
1320 DECL_ATTRIBUTES (var));
1324 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1326 static enum gimplify_status
1327 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1329 tree bind_expr = *expr_p;
1330 bool old_keep_stack = gimplify_ctxp->keep_stack;
1331 bool old_save_stack = gimplify_ctxp->save_stack;
1332 tree t;
1333 gbind *bind_stmt;
1334 gimple_seq body, cleanup;
1335 gcall *stack_save;
1336 location_t start_locus = 0, end_locus = 0;
1337 tree ret_clauses = NULL;
1339 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1341 /* Mark variables seen in this bind expr. */
1342 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1344 if (VAR_P (t))
1346 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1348 /* Mark variable as local. */
1349 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
1351 if (! DECL_SEEN_IN_BIND_EXPR_P (t)
1352 || splay_tree_lookup (ctx->variables,
1353 (splay_tree_key) t) == NULL)
1355 int flag = GOVD_LOCAL;
1356 if (ctx->region_type == ORT_SIMD
1357 && TREE_ADDRESSABLE (t)
1358 && !TREE_STATIC (t))
1360 if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
1361 ctx->add_safelen1 = true;
1362 else
1363 flag = GOVD_PRIVATE;
1365 omp_add_variable (ctx, t, flag | GOVD_SEEN);
1367 /* Static locals inside of target construct or offloaded
1368 routines need to be "omp declare target". */
1369 if (TREE_STATIC (t))
1370 for (; ctx; ctx = ctx->outer_context)
1371 if ((ctx->region_type & ORT_TARGET) != 0)
1373 if (!lookup_attribute ("omp declare target",
1374 DECL_ATTRIBUTES (t)))
1376 tree id = get_identifier ("omp declare target");
1377 DECL_ATTRIBUTES (t)
1378 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
1379 varpool_node *node = varpool_node::get (t);
1380 if (node)
1382 node->offloadable = 1;
1383 if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
1385 g->have_offload = true;
1386 if (!in_lto_p)
1387 vec_safe_push (offload_vars, t);
1391 break;
1395 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1397 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1398 cfun->has_local_explicit_reg_vars = true;
1402 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1403 BIND_EXPR_BLOCK (bind_expr));
1404 gimple_push_bind_expr (bind_stmt);
1406 gimplify_ctxp->keep_stack = false;
1407 gimplify_ctxp->save_stack = false;
1409 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1410 body = NULL;
1411 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1412 gimple_bind_set_body (bind_stmt, body);
1414 /* Source location wise, the cleanup code (stack_restore and clobbers)
1415 belongs to the end of the block, so propagate what we have. The
1416 stack_save operation belongs to the beginning of block, which we can
1417 infer from the bind_expr directly if the block has no explicit
1418 assignment. */
1419 if (BIND_EXPR_BLOCK (bind_expr))
1421 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1422 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1424 if (start_locus == 0)
1425 start_locus = EXPR_LOCATION (bind_expr);
1427 cleanup = NULL;
1428 stack_save = NULL;
1430 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1431 the stack space allocated to the VLAs. */
1432 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1434 gcall *stack_restore;
1436 /* Save stack on entry and restore it on exit. Add a try_finally
1437 block to achieve this. */
1438 build_stack_save_restore (&stack_save, &stack_restore);
1440 gimple_set_location (stack_save, start_locus);
1441 gimple_set_location (stack_restore, end_locus);
1443 gimplify_seq_add_stmt (&cleanup, stack_restore);
1446 /* Add clobbers for all variables that go out of scope. */
1447 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1449 if (VAR_P (t)
1450 && !is_global_var (t)
1451 && DECL_CONTEXT (t) == current_function_decl)
1453 if (!DECL_HARD_REGISTER (t)
1454 && !TREE_THIS_VOLATILE (t)
1455 && !DECL_HAS_VALUE_EXPR_P (t)
1456 /* Only care for variables that have to be in memory. Others
1457 will be rewritten into SSA names, hence moved to the
1458 top-level. */
1459 && !is_gimple_reg (t)
1460 && flag_stack_reuse != SR_NONE)
1462 tree clobber = build_clobber (TREE_TYPE (t));
1463 gimple *clobber_stmt;
1464 clobber_stmt = gimple_build_assign (t, clobber);
1465 gimple_set_location (clobber_stmt, end_locus);
1466 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1469 if (flag_openacc && oacc_declare_returns != NULL)
1471 tree *c = oacc_declare_returns->get (t);
1472 if (c != NULL)
1474 if (ret_clauses)
1475 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1477 ret_clauses = *c;
1479 oacc_declare_returns->remove (t);
1481 if (oacc_declare_returns->is_empty ())
1483 delete oacc_declare_returns;
1484 oacc_declare_returns = NULL;
1490 if (asan_poisoned_variables != NULL
1491 && asan_poisoned_variables->contains (t))
1493 asan_poisoned_variables->remove (t);
1494 asan_poison_variable (t, true, &cleanup);
1497 if (gimplify_ctxp->live_switch_vars != NULL
1498 && gimplify_ctxp->live_switch_vars->contains (t))
1499 gimplify_ctxp->live_switch_vars->remove (t);
1502 if (ret_clauses)
1504 gomp_target *stmt;
1505 gimple_stmt_iterator si = gsi_start (cleanup);
1507 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1508 ret_clauses);
1509 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1512 if (cleanup)
1514 gtry *gs;
1515 gimple_seq new_body;
1517 new_body = NULL;
1518 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1519 GIMPLE_TRY_FINALLY);
1521 if (stack_save)
1522 gimplify_seq_add_stmt (&new_body, stack_save);
1523 gimplify_seq_add_stmt (&new_body, gs);
1524 gimple_bind_set_body (bind_stmt, new_body);
1527 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1528 if (!gimplify_ctxp->keep_stack)
1529 gimplify_ctxp->keep_stack = old_keep_stack;
1530 gimplify_ctxp->save_stack = old_save_stack;
1532 gimple_pop_bind_expr ();
1534 gimplify_seq_add_stmt (pre_p, bind_stmt);
1536 if (temp)
1538 *expr_p = temp;
1539 return GS_OK;
1542 *expr_p = NULL_TREE;
1543 return GS_ALL_DONE;
1546 /* Maybe add early return predict statement to PRE_P sequence. */
1548 static void
1549 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1551 /* If we are not in a conditional context, add PREDICT statement. */
1552 if (gimple_conditional_context ())
1554 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1555 NOT_TAKEN);
1556 gimplify_seq_add_stmt (pre_p, predict);
1560 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1561 GIMPLE value, it is assigned to a new temporary and the statement is
1562 re-written to return the temporary.
1564 PRE_P points to the sequence where side effects that must happen before
1565 STMT should be stored. */
1567 static enum gimplify_status
1568 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1570 greturn *ret;
1571 tree ret_expr = TREE_OPERAND (stmt, 0);
1572 tree result_decl, result;
1574 if (ret_expr == error_mark_node)
1575 return GS_ERROR;
1577 if (!ret_expr
1578 || TREE_CODE (ret_expr) == RESULT_DECL)
1580 maybe_add_early_return_predict_stmt (pre_p);
1581 greturn *ret = gimple_build_return (ret_expr);
1582 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1583 gimplify_seq_add_stmt (pre_p, ret);
1584 return GS_ALL_DONE;
1587 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1588 result_decl = NULL_TREE;
1589 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1591 /* Used in C++ for handling EH cleanup of the return value if a local
1592 cleanup throws. Assume the front-end knows what it's doing. */
1593 result_decl = DECL_RESULT (current_function_decl);
1594 /* But crash if we end up trying to modify ret_expr below. */
1595 ret_expr = NULL_TREE;
1597 else
1599 result_decl = TREE_OPERAND (ret_expr, 0);
1601 /* See through a return by reference. */
1602 if (TREE_CODE (result_decl) == INDIRECT_REF)
1603 result_decl = TREE_OPERAND (result_decl, 0);
1605 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1606 || TREE_CODE (ret_expr) == INIT_EXPR)
1607 && TREE_CODE (result_decl) == RESULT_DECL);
1610 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1611 Recall that aggregate_value_p is FALSE for any aggregate type that is
1612 returned in registers. If we're returning values in registers, then
1613 we don't want to extend the lifetime of the RESULT_DECL, particularly
1614 across another call. In addition, for those aggregates for which
1615 hard_function_value generates a PARALLEL, we'll die during normal
1616 expansion of structure assignments; there's special code in expand_return
1617 to handle this case that does not exist in expand_expr. */
1618 if (!result_decl)
1619 result = NULL_TREE;
1620 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1622 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1624 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1625 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1626 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1627 should be effectively allocated by the caller, i.e. all calls to
1628 this function must be subject to the Return Slot Optimization. */
1629 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1630 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1632 result = result_decl;
1634 else if (gimplify_ctxp->return_temp)
1635 result = gimplify_ctxp->return_temp;
1636 else
1638 result = create_tmp_reg (TREE_TYPE (result_decl));
1640 /* ??? With complex control flow (usually involving abnormal edges),
1641 we can wind up warning about an uninitialized value for this. Due
1642 to how this variable is constructed and initialized, this is never
1643 true. Give up and never warn. */
1644 TREE_NO_WARNING (result) = 1;
1646 gimplify_ctxp->return_temp = result;
1649 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1650 Then gimplify the whole thing. */
1651 if (result != result_decl)
1652 TREE_OPERAND (ret_expr, 0) = result;
1654 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1656 maybe_add_early_return_predict_stmt (pre_p);
1657 ret = gimple_build_return (result);
1658 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1659 gimplify_seq_add_stmt (pre_p, ret);
1661 return GS_ALL_DONE;
1664 /* Gimplify a variable-length array DECL. */
1666 static void
1667 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1669 /* This is a variable-sized decl. Simplify its size and mark it
1670 for deferred expansion. */
1671 tree t, addr, ptr_type;
1673 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1674 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1676 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1677 if (DECL_HAS_VALUE_EXPR_P (decl))
1678 return;
1680 /* All occurrences of this decl in final gimplified code will be
1681 replaced by indirection. Setting DECL_VALUE_EXPR does two
1682 things: First, it lets the rest of the gimplifier know what
1683 replacement to use. Second, it lets the debug info know
1684 where to find the value. */
1685 ptr_type = build_pointer_type (TREE_TYPE (decl));
1686 addr = create_tmp_var (ptr_type, get_name (decl));
1687 DECL_IGNORED_P (addr) = 0;
1688 t = build_fold_indirect_ref (addr);
1689 TREE_THIS_NOTRAP (t) = 1;
1690 SET_DECL_VALUE_EXPR (decl, t);
1691 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1693 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1694 max_int_size_in_bytes (TREE_TYPE (decl)));
1695 /* The call has been built for a variable-sized object. */
1696 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1697 t = fold_convert (ptr_type, t);
1698 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1700 gimplify_and_add (t, seq_p);
1702 /* Record the dynamic allocation associated with DECL if requested. */
1703 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1704 record_dynamic_alloc (decl);
1707 /* A helper function to be called via walk_tree. Mark all labels under *TP
1708 as being forced. To be called for DECL_INITIAL of static variables. */
1710 static tree
1711 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1713 if (TYPE_P (*tp))
1714 *walk_subtrees = 0;
1715 if (TREE_CODE (*tp) == LABEL_DECL)
1717 FORCED_LABEL (*tp) = 1;
1718 cfun->has_forced_label_in_static = 1;
1721 return NULL_TREE;
1724 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1725 and initialization explicit. */
1727 static enum gimplify_status
1728 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1730 tree stmt = *stmt_p;
1731 tree decl = DECL_EXPR_DECL (stmt);
1733 *stmt_p = NULL_TREE;
1735 if (TREE_TYPE (decl) == error_mark_node)
1736 return GS_ERROR;
1738 if ((TREE_CODE (decl) == TYPE_DECL
1739 || VAR_P (decl))
1740 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1742 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1743 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1744 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1747 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1748 in case its size expressions contain problematic nodes like CALL_EXPR. */
1749 if (TREE_CODE (decl) == TYPE_DECL
1750 && DECL_ORIGINAL_TYPE (decl)
1751 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1753 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1754 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1755 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1758 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1760 tree init = DECL_INITIAL (decl);
1761 bool is_vla = false;
1763 poly_uint64 size;
1764 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1765 || (!TREE_STATIC (decl)
1766 && flag_stack_check == GENERIC_STACK_CHECK
1767 && maybe_gt (size,
1768 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1770 gimplify_vla_decl (decl, seq_p);
1771 is_vla = true;
1774 if (asan_poisoned_variables
1775 && !is_vla
1776 && TREE_ADDRESSABLE (decl)
1777 && !TREE_STATIC (decl)
1778 && !DECL_HAS_VALUE_EXPR_P (decl)
1779 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1780 && dbg_cnt (asan_use_after_scope)
1781 && !gimplify_omp_ctxp)
1783 asan_poisoned_variables->add (decl);
1784 asan_poison_variable (decl, false, seq_p);
1785 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1786 gimplify_ctxp->live_switch_vars->add (decl);
1789 /* Some front ends do not explicitly declare all anonymous
1790 artificial variables. We compensate here by declaring the
1791 variables, though it would be better if the front ends would
1792 explicitly declare them. */
1793 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1794 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1795 gimple_add_tmp_var (decl);
1797 if (init && init != error_mark_node)
1799 if (!TREE_STATIC (decl))
1801 DECL_INITIAL (decl) = NULL_TREE;
1802 init = build2 (INIT_EXPR, void_type_node, decl, init);
1803 gimplify_and_add (init, seq_p);
1804 ggc_free (init);
1806 else
1807 /* We must still examine initializers for static variables
1808 as they may contain a label address. */
1809 walk_tree (&init, force_labels_r, NULL, NULL);
1813 return GS_ALL_DONE;
1816 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1817 and replacing the LOOP_EXPR with goto, but if the loop contains an
1818 EXIT_EXPR, we need to append a label for it to jump to. */
1820 static enum gimplify_status
1821 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1823 tree saved_label = gimplify_ctxp->exit_label;
1824 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1826 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1828 gimplify_ctxp->exit_label = NULL_TREE;
1830 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1832 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1834 if (gimplify_ctxp->exit_label)
1835 gimplify_seq_add_stmt (pre_p,
1836 gimple_build_label (gimplify_ctxp->exit_label));
1838 gimplify_ctxp->exit_label = saved_label;
1840 *expr_p = NULL;
1841 return GS_ALL_DONE;
1844 /* Gimplify a statement list onto a sequence. These may be created either
1845 by an enlightened front-end, or by shortcut_cond_expr. */
1847 static enum gimplify_status
1848 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1850 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1852 tree_stmt_iterator i = tsi_start (*expr_p);
1854 while (!tsi_end_p (i))
1856 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1857 tsi_delink (&i);
1860 if (temp)
1862 *expr_p = temp;
1863 return GS_OK;
1866 return GS_ALL_DONE;
1869 /* Callback for walk_gimple_seq. */
1871 static tree
1872 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1873 struct walk_stmt_info *wi)
1875 gimple *stmt = gsi_stmt (*gsi_p);
1877 *handled_ops_p = true;
1878 switch (gimple_code (stmt))
1880 case GIMPLE_TRY:
1881 /* A compiler-generated cleanup or a user-written try block.
1882 If it's empty, don't dive into it--that would result in
1883 worse location info. */
1884 if (gimple_try_eval (stmt) == NULL)
1886 wi->info = stmt;
1887 return integer_zero_node;
1889 /* Fall through. */
1890 case GIMPLE_BIND:
1891 case GIMPLE_CATCH:
1892 case GIMPLE_EH_FILTER:
1893 case GIMPLE_TRANSACTION:
1894 /* Walk the sub-statements. */
1895 *handled_ops_p = false;
1896 break;
1898 case GIMPLE_DEBUG:
1899 /* Ignore these. We may generate them before declarations that
1900 are never executed. If there's something to warn about,
1901 there will be non-debug stmts too, and we'll catch those. */
1902 break;
1904 case GIMPLE_CALL:
1905 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1907 *handled_ops_p = false;
1908 break;
1910 /* Fall through. */
1911 default:
1912 /* Save the first "real" statement (not a decl/lexical scope/...). */
1913 wi->info = stmt;
1914 return integer_zero_node;
1916 return NULL_TREE;
1919 /* Possibly warn about unreachable statements between switch's controlling
1920 expression and the first case. SEQ is the body of a switch expression. */
1922 static void
1923 maybe_warn_switch_unreachable (gimple_seq seq)
1925 if (!warn_switch_unreachable
1926 /* This warning doesn't play well with Fortran when optimizations
1927 are on. */
1928 || lang_GNU_Fortran ()
1929 || seq == NULL)
1930 return;
1932 struct walk_stmt_info wi;
1933 memset (&wi, 0, sizeof (wi));
1934 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1935 gimple *stmt = (gimple *) wi.info;
1937 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1939 if (gimple_code (stmt) == GIMPLE_GOTO
1940 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1941 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1942 /* Don't warn for compiler-generated gotos. These occur
1943 in Duff's devices, for example. */;
1944 else
1945 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1946 "statement will never be executed");
1951 /* A label entry that pairs label and a location. */
1952 struct label_entry
1954 tree label;
1955 location_t loc;
1958 /* Find LABEL in vector of label entries VEC. */
1960 static struct label_entry *
1961 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1963 unsigned int i;
1964 struct label_entry *l;
1966 FOR_EACH_VEC_ELT (*vec, i, l)
1967 if (l->label == label)
1968 return l;
1969 return NULL;
1972 /* Return true if LABEL, a LABEL_DECL, represents a case label
1973 in a vector of labels CASES. */
1975 static bool
1976 case_label_p (const vec<tree> *cases, tree label)
1978 unsigned int i;
1979 tree l;
1981 FOR_EACH_VEC_ELT (*cases, i, l)
1982 if (CASE_LABEL (l) == label)
1983 return true;
1984 return false;
1987 /* Find the last nondebug statement in a scope STMT. */
1989 static gimple *
1990 last_stmt_in_scope (gimple *stmt)
1992 if (!stmt)
1993 return NULL;
1995 switch (gimple_code (stmt))
1997 case GIMPLE_BIND:
1999 gbind *bind = as_a <gbind *> (stmt);
2000 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2001 return last_stmt_in_scope (stmt);
2004 case GIMPLE_TRY:
2006 gtry *try_stmt = as_a <gtry *> (stmt);
2007 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2008 gimple *last_eval = last_stmt_in_scope (stmt);
2009 if (gimple_stmt_may_fallthru (last_eval)
2010 && (last_eval == NULL
2011 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2012 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2014 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2015 return last_stmt_in_scope (stmt);
2017 else
2018 return last_eval;
2021 case GIMPLE_DEBUG:
2022 gcc_unreachable ();
2024 default:
2025 return stmt;
2029 /* Collect interesting labels in LABELS and return the statement preceding
2030 another case label, or a user-defined label. Store a location useful
2031 to give warnings at *PREVLOC (usually the location of the returned
2032 statement or of its surrounding scope). */
2034 static gimple *
2035 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2036 auto_vec <struct label_entry> *labels,
2037 location_t *prevloc)
2039 gimple *prev = NULL;
2041 *prevloc = UNKNOWN_LOCATION;
2044 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2046 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2047 which starts on a GIMPLE_SWITCH and ends with a break label.
2048 Handle that as a single statement that can fall through. */
2049 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2050 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2051 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2052 if (last
2053 && gimple_code (first) == GIMPLE_SWITCH
2054 && gimple_code (last) == GIMPLE_LABEL)
2056 tree label = gimple_label_label (as_a <glabel *> (last));
2057 if (SWITCH_BREAK_LABEL_P (label))
2059 prev = bind;
2060 gsi_next (gsi_p);
2061 continue;
2065 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2066 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2068 /* Nested scope. Only look at the last statement of
2069 the innermost scope. */
2070 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2071 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2072 if (last)
2074 prev = last;
2075 /* It might be a label without a location. Use the
2076 location of the scope then. */
2077 if (!gimple_has_location (prev))
2078 *prevloc = bind_loc;
2080 gsi_next (gsi_p);
2081 continue;
2084 /* Ifs are tricky. */
2085 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2087 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2088 tree false_lab = gimple_cond_false_label (cond_stmt);
2089 location_t if_loc = gimple_location (cond_stmt);
2091 /* If we have e.g.
2092 if (i > 1) goto <D.2259>; else goto D;
2093 we can't do much with the else-branch. */
2094 if (!DECL_ARTIFICIAL (false_lab))
2095 break;
2097 /* Go on until the false label, then one step back. */
2098 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2100 gimple *stmt = gsi_stmt (*gsi_p);
2101 if (gimple_code (stmt) == GIMPLE_LABEL
2102 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2103 break;
2106 /* Not found? Oops. */
2107 if (gsi_end_p (*gsi_p))
2108 break;
2110 struct label_entry l = { false_lab, if_loc };
2111 labels->safe_push (l);
2113 /* Go to the last statement of the then branch. */
2114 gsi_prev (gsi_p);
2116 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2117 <D.1759>:
2118 <stmt>;
2119 goto <D.1761>;
2120 <D.1760>:
2122 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2123 && !gimple_has_location (gsi_stmt (*gsi_p)))
2125 /* Look at the statement before, it might be
2126 attribute fallthrough, in which case don't warn. */
2127 gsi_prev (gsi_p);
2128 bool fallthru_before_dest
2129 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2130 gsi_next (gsi_p);
2131 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2132 if (!fallthru_before_dest)
2134 struct label_entry l = { goto_dest, if_loc };
2135 labels->safe_push (l);
2138 /* And move back. */
2139 gsi_next (gsi_p);
2142 /* Remember the last statement. Skip labels that are of no interest
2143 to us. */
2144 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2146 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2147 if (find_label_entry (labels, label))
2148 prev = gsi_stmt (*gsi_p);
2150 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2152 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2154 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2155 prev = gsi_stmt (*gsi_p);
2156 gsi_next (gsi_p);
2158 while (!gsi_end_p (*gsi_p)
2159 /* Stop if we find a case or a user-defined label. */
2160 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2161 || !gimple_has_location (gsi_stmt (*gsi_p))));
2163 if (prev && gimple_has_location (prev))
2164 *prevloc = gimple_location (prev);
2165 return prev;
2168 /* Return true if the switch fallthough warning should occur. LABEL is
2169 the label statement that we're falling through to. */
2171 static bool
2172 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2174 gimple_stmt_iterator gsi = *gsi_p;
2176 /* Don't warn if the label is marked with a "falls through" comment. */
2177 if (FALLTHROUGH_LABEL_P (label))
2178 return false;
2180 /* Don't warn for non-case labels followed by a statement:
2181 case 0:
2182 foo ();
2183 label:
2184 bar ();
2185 as these are likely intentional. */
2186 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2188 tree l;
2189 while (!gsi_end_p (gsi)
2190 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2191 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2192 && !case_label_p (&gimplify_ctxp->case_labels, l))
2193 gsi_next_nondebug (&gsi);
2194 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2195 return false;
2198 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2199 immediately breaks. */
2200 gsi = *gsi_p;
2202 /* Skip all immediately following labels. */
2203 while (!gsi_end_p (gsi)
2204 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2205 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2206 gsi_next_nondebug (&gsi);
2208 /* { ... something; default:; } */
2209 if (gsi_end_p (gsi)
2210 /* { ... something; default: break; } or
2211 { ... something; default: goto L; } */
2212 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2213 /* { ... something; default: return; } */
2214 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2215 return false;
2217 return true;
2220 /* Callback for walk_gimple_seq. */
2222 static tree
2223 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2224 struct walk_stmt_info *)
2226 gimple *stmt = gsi_stmt (*gsi_p);
2228 *handled_ops_p = true;
2229 switch (gimple_code (stmt))
2231 case GIMPLE_TRY:
2232 case GIMPLE_BIND:
2233 case GIMPLE_CATCH:
2234 case GIMPLE_EH_FILTER:
2235 case GIMPLE_TRANSACTION:
2236 /* Walk the sub-statements. */
2237 *handled_ops_p = false;
2238 break;
2240 /* Find a sequence of form:
2242 GIMPLE_LABEL
2243 [...]
2244 <may fallthru stmt>
2245 GIMPLE_LABEL
2247 and possibly warn. */
2248 case GIMPLE_LABEL:
2250 /* Found a label. Skip all immediately following labels. */
2251 while (!gsi_end_p (*gsi_p)
2252 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2253 gsi_next_nondebug (gsi_p);
2255 /* There might be no more statements. */
2256 if (gsi_end_p (*gsi_p))
2257 return integer_zero_node;
2259 /* Vector of labels that fall through. */
2260 auto_vec <struct label_entry> labels;
2261 location_t prevloc;
2262 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2264 /* There might be no more statements. */
2265 if (gsi_end_p (*gsi_p))
2266 return integer_zero_node;
2268 gimple *next = gsi_stmt (*gsi_p);
2269 tree label;
2270 /* If what follows is a label, then we may have a fallthrough. */
2271 if (gimple_code (next) == GIMPLE_LABEL
2272 && gimple_has_location (next)
2273 && (label = gimple_label_label (as_a <glabel *> (next)))
2274 && prev != NULL)
2276 struct label_entry *l;
2277 bool warned_p = false;
2278 auto_diagnostic_group d;
2279 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2280 /* Quiet. */;
2281 else if (gimple_code (prev) == GIMPLE_LABEL
2282 && (label = gimple_label_label (as_a <glabel *> (prev)))
2283 && (l = find_label_entry (&labels, label)))
2284 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2285 "this statement may fall through");
2286 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2287 /* Try to be clever and don't warn when the statement
2288 can't actually fall through. */
2289 && gimple_stmt_may_fallthru (prev)
2290 && prevloc != UNKNOWN_LOCATION)
2291 warned_p = warning_at (prevloc,
2292 OPT_Wimplicit_fallthrough_,
2293 "this statement may fall through");
2294 if (warned_p)
2295 inform (gimple_location (next), "here");
2297 /* Mark this label as processed so as to prevent multiple
2298 warnings in nested switches. */
2299 FALLTHROUGH_LABEL_P (label) = true;
2301 /* So that next warn_implicit_fallthrough_r will start looking for
2302 a new sequence starting with this label. */
2303 gsi_prev (gsi_p);
2306 break;
2307 default:
2308 break;
2310 return NULL_TREE;
2313 /* Warn when a switch case falls through. */
2315 static void
2316 maybe_warn_implicit_fallthrough (gimple_seq seq)
2318 if (!warn_implicit_fallthrough)
2319 return;
2321 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2322 if (!(lang_GNU_C ()
2323 || lang_GNU_CXX ()
2324 || lang_GNU_OBJC ()))
2325 return;
2327 struct walk_stmt_info wi;
2328 memset (&wi, 0, sizeof (wi));
2329 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2332 /* Callback for walk_gimple_seq. */
2334 static tree
2335 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2336 struct walk_stmt_info *wi)
2338 gimple *stmt = gsi_stmt (*gsi_p);
2340 *handled_ops_p = true;
2341 switch (gimple_code (stmt))
2343 case GIMPLE_TRY:
2344 case GIMPLE_BIND:
2345 case GIMPLE_CATCH:
2346 case GIMPLE_EH_FILTER:
2347 case GIMPLE_TRANSACTION:
2348 /* Walk the sub-statements. */
2349 *handled_ops_p = false;
2350 break;
2351 case GIMPLE_CALL:
2352 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2354 gsi_remove (gsi_p, true);
2355 if (gsi_end_p (*gsi_p))
2357 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2358 return integer_zero_node;
2361 bool found = false;
2362 location_t loc = gimple_location (stmt);
2364 gimple_stmt_iterator gsi2 = *gsi_p;
2365 stmt = gsi_stmt (gsi2);
2366 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2368 /* Go on until the artificial label. */
2369 tree goto_dest = gimple_goto_dest (stmt);
2370 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2372 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2373 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2374 == goto_dest)
2375 break;
2378 /* Not found? Stop. */
2379 if (gsi_end_p (gsi2))
2380 break;
2382 /* Look one past it. */
2383 gsi_next (&gsi2);
2386 /* We're looking for a case label or default label here. */
2387 while (!gsi_end_p (gsi2))
2389 stmt = gsi_stmt (gsi2);
2390 if (gimple_code (stmt) == GIMPLE_LABEL)
2392 tree label = gimple_label_label (as_a <glabel *> (stmt));
2393 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2395 found = true;
2396 break;
2399 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2401 else if (!is_gimple_debug (stmt))
2402 /* Anything else is not expected. */
2403 break;
2404 gsi_next (&gsi2);
2406 if (!found)
2407 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2408 "a case label or default label");
2410 break;
2411 default:
2412 break;
2414 return NULL_TREE;
2417 /* Expand all FALLTHROUGH () calls in SEQ. */
2419 static void
2420 expand_FALLTHROUGH (gimple_seq *seq_p)
2422 struct walk_stmt_info wi;
2423 location_t loc;
2424 memset (&wi, 0, sizeof (wi));
2425 wi.info = (void *) &loc;
2426 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2427 if (wi.callback_result == integer_zero_node)
2428 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2429 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2430 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2431 "a case label or default label");
2435 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2436 branch to. */
2438 static enum gimplify_status
2439 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2441 tree switch_expr = *expr_p;
2442 gimple_seq switch_body_seq = NULL;
2443 enum gimplify_status ret;
2444 tree index_type = TREE_TYPE (switch_expr);
2445 if (index_type == NULL_TREE)
2446 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2448 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2449 fb_rvalue);
2450 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2451 return ret;
2453 if (SWITCH_BODY (switch_expr))
2455 vec<tree> labels;
2456 vec<tree> saved_labels;
2457 hash_set<tree> *saved_live_switch_vars = NULL;
2458 tree default_case = NULL_TREE;
2459 gswitch *switch_stmt;
2461 /* Save old labels, get new ones from body, then restore the old
2462 labels. Save all the things from the switch body to append after. */
2463 saved_labels = gimplify_ctxp->case_labels;
2464 gimplify_ctxp->case_labels.create (8);
2466 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2467 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2468 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2469 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2470 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2471 else
2472 gimplify_ctxp->live_switch_vars = NULL;
2474 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2475 gimplify_ctxp->in_switch_expr = true;
2477 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2479 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2480 maybe_warn_switch_unreachable (switch_body_seq);
2481 maybe_warn_implicit_fallthrough (switch_body_seq);
2482 /* Only do this for the outermost GIMPLE_SWITCH. */
2483 if (!gimplify_ctxp->in_switch_expr)
2484 expand_FALLTHROUGH (&switch_body_seq);
2486 labels = gimplify_ctxp->case_labels;
2487 gimplify_ctxp->case_labels = saved_labels;
2489 if (gimplify_ctxp->live_switch_vars)
2491 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2492 delete gimplify_ctxp->live_switch_vars;
2494 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2496 preprocess_case_label_vec_for_gimple (labels, index_type,
2497 &default_case);
2499 bool add_bind = false;
2500 if (!default_case)
2502 glabel *new_default;
2504 default_case
2505 = build_case_label (NULL_TREE, NULL_TREE,
2506 create_artificial_label (UNKNOWN_LOCATION));
2507 if (old_in_switch_expr)
2509 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2510 add_bind = true;
2512 new_default = gimple_build_label (CASE_LABEL (default_case));
2513 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2515 else if (old_in_switch_expr)
2517 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2518 if (last && gimple_code (last) == GIMPLE_LABEL)
2520 tree label = gimple_label_label (as_a <glabel *> (last));
2521 if (SWITCH_BREAK_LABEL_P (label))
2522 add_bind = true;
2526 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2527 default_case, labels);
2528 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2529 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2530 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2531 so that we can easily find the start and end of the switch
2532 statement. */
2533 if (add_bind)
2535 gimple_seq bind_body = NULL;
2536 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2537 gimple_seq_add_seq (&bind_body, switch_body_seq);
2538 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2539 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2540 gimplify_seq_add_stmt (pre_p, bind);
2542 else
2544 gimplify_seq_add_stmt (pre_p, switch_stmt);
2545 gimplify_seq_add_seq (pre_p, switch_body_seq);
2547 labels.release ();
2549 else
2550 gcc_unreachable ();
2552 return GS_ALL_DONE;
2555 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2557 static enum gimplify_status
2558 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2560 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2561 == current_function_decl);
2563 tree label = LABEL_EXPR_LABEL (*expr_p);
2564 glabel *label_stmt = gimple_build_label (label);
2565 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2566 gimplify_seq_add_stmt (pre_p, label_stmt);
2568 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2569 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2570 NOT_TAKEN));
2571 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2572 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2573 TAKEN));
2575 return GS_ALL_DONE;
2578 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2580 static enum gimplify_status
2581 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2583 struct gimplify_ctx *ctxp;
2584 glabel *label_stmt;
2586 /* Invalid programs can play Duff's Device type games with, for example,
2587 #pragma omp parallel. At least in the C front end, we don't
2588 detect such invalid branches until after gimplification, in the
2589 diagnose_omp_blocks pass. */
2590 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2591 if (ctxp->case_labels.exists ())
2592 break;
2594 tree label = CASE_LABEL (*expr_p);
2595 label_stmt = gimple_build_label (label);
2596 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2597 ctxp->case_labels.safe_push (*expr_p);
2598 gimplify_seq_add_stmt (pre_p, label_stmt);
2600 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2601 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2602 NOT_TAKEN));
2603 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2604 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2605 TAKEN));
2607 return GS_ALL_DONE;
2610 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2611 if necessary. */
2613 tree
2614 build_and_jump (tree *label_p)
2616 if (label_p == NULL)
2617 /* If there's nowhere to jump, just fall through. */
2618 return NULL_TREE;
2620 if (*label_p == NULL_TREE)
2622 tree label = create_artificial_label (UNKNOWN_LOCATION);
2623 *label_p = label;
2626 return build1 (GOTO_EXPR, void_type_node, *label_p);
2629 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2630 This also involves building a label to jump to and communicating it to
2631 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2633 static enum gimplify_status
2634 gimplify_exit_expr (tree *expr_p)
2636 tree cond = TREE_OPERAND (*expr_p, 0);
2637 tree expr;
2639 expr = build_and_jump (&gimplify_ctxp->exit_label);
2640 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2641 *expr_p = expr;
2643 return GS_OK;
2646 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2647 different from its canonical type, wrap the whole thing inside a
2648 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2649 type.
2651 The canonical type of a COMPONENT_REF is the type of the field being
2652 referenced--unless the field is a bit-field which can be read directly
2653 in a smaller mode, in which case the canonical type is the
2654 sign-appropriate type corresponding to that mode. */
2656 static void
2657 canonicalize_component_ref (tree *expr_p)
2659 tree expr = *expr_p;
2660 tree type;
2662 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2664 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2665 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2666 else
2667 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2669 /* One could argue that all the stuff below is not necessary for
2670 the non-bitfield case and declare it a FE error if type
2671 adjustment would be needed. */
2672 if (TREE_TYPE (expr) != type)
2674 #ifdef ENABLE_TYPES_CHECKING
2675 tree old_type = TREE_TYPE (expr);
2676 #endif
2677 int type_quals;
2679 /* We need to preserve qualifiers and propagate them from
2680 operand 0. */
2681 type_quals = TYPE_QUALS (type)
2682 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2683 if (TYPE_QUALS (type) != type_quals)
2684 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2686 /* Set the type of the COMPONENT_REF to the underlying type. */
2687 TREE_TYPE (expr) = type;
2689 #ifdef ENABLE_TYPES_CHECKING
2690 /* It is now a FE error, if the conversion from the canonical
2691 type to the original expression type is not useless. */
2692 gcc_assert (useless_type_conversion_p (old_type, type));
2693 #endif
2697 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2698 to foo, embed that change in the ADDR_EXPR by converting
2699 T array[U];
2700 (T *)&array
2702 &array[L]
2703 where L is the lower bound. For simplicity, only do this for constant
2704 lower bound.
2705 The constraint is that the type of &array[L] is trivially convertible
2706 to T *. */
2708 static void
2709 canonicalize_addr_expr (tree *expr_p)
2711 tree expr = *expr_p;
2712 tree addr_expr = TREE_OPERAND (expr, 0);
2713 tree datype, ddatype, pddatype;
2715 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2716 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2717 || TREE_CODE (addr_expr) != ADDR_EXPR)
2718 return;
2720 /* The addr_expr type should be a pointer to an array. */
2721 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2722 if (TREE_CODE (datype) != ARRAY_TYPE)
2723 return;
2725 /* The pointer to element type shall be trivially convertible to
2726 the expression pointer type. */
2727 ddatype = TREE_TYPE (datype);
2728 pddatype = build_pointer_type (ddatype);
2729 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2730 pddatype))
2731 return;
2733 /* The lower bound and element sizes must be constant. */
2734 if (!TYPE_SIZE_UNIT (ddatype)
2735 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2736 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2737 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2738 return;
2740 /* All checks succeeded. Build a new node to merge the cast. */
2741 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2742 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2743 NULL_TREE, NULL_TREE);
2744 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2746 /* We can have stripped a required restrict qualifier above. */
2747 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2748 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2751 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2752 underneath as appropriate. */
2754 static enum gimplify_status
2755 gimplify_conversion (tree *expr_p)
2757 location_t loc = EXPR_LOCATION (*expr_p);
2758 gcc_assert (CONVERT_EXPR_P (*expr_p));
2760 /* Then strip away all but the outermost conversion. */
2761 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2763 /* And remove the outermost conversion if it's useless. */
2764 if (tree_ssa_useless_type_conversion (*expr_p))
2765 *expr_p = TREE_OPERAND (*expr_p, 0);
2767 /* If we still have a conversion at the toplevel,
2768 then canonicalize some constructs. */
2769 if (CONVERT_EXPR_P (*expr_p))
2771 tree sub = TREE_OPERAND (*expr_p, 0);
2773 /* If a NOP conversion is changing the type of a COMPONENT_REF
2774 expression, then canonicalize its type now in order to expose more
2775 redundant conversions. */
2776 if (TREE_CODE (sub) == COMPONENT_REF)
2777 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2779 /* If a NOP conversion is changing a pointer to array of foo
2780 to a pointer to foo, embed that change in the ADDR_EXPR. */
2781 else if (TREE_CODE (sub) == ADDR_EXPR)
2782 canonicalize_addr_expr (expr_p);
2785 /* If we have a conversion to a non-register type force the
2786 use of a VIEW_CONVERT_EXPR instead. */
2787 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2788 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2789 TREE_OPERAND (*expr_p, 0));
2791 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2792 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2793 TREE_SET_CODE (*expr_p, NOP_EXPR);
2795 return GS_OK;
2798 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2799 DECL_VALUE_EXPR, and it's worth re-examining things. */
2801 static enum gimplify_status
2802 gimplify_var_or_parm_decl (tree *expr_p)
2804 tree decl = *expr_p;
2806 /* ??? If this is a local variable, and it has not been seen in any
2807 outer BIND_EXPR, then it's probably the result of a duplicate
2808 declaration, for which we've already issued an error. It would
2809 be really nice if the front end wouldn't leak these at all.
2810 Currently the only known culprit is C++ destructors, as seen
2811 in g++.old-deja/g++.jason/binding.C. */
2812 if (VAR_P (decl)
2813 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2814 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2815 && decl_function_context (decl) == current_function_decl)
2817 gcc_assert (seen_error ());
2818 return GS_ERROR;
2821 /* When within an OMP context, notice uses of variables. */
2822 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2823 return GS_ALL_DONE;
2825 /* If the decl is an alias for another expression, substitute it now. */
2826 if (DECL_HAS_VALUE_EXPR_P (decl))
2828 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2829 return GS_OK;
2832 return GS_ALL_DONE;
2835 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2837 static void
2838 recalculate_side_effects (tree t)
2840 enum tree_code code = TREE_CODE (t);
2841 int len = TREE_OPERAND_LENGTH (t);
2842 int i;
2844 switch (TREE_CODE_CLASS (code))
2846 case tcc_expression:
2847 switch (code)
2849 case INIT_EXPR:
2850 case MODIFY_EXPR:
2851 case VA_ARG_EXPR:
2852 case PREDECREMENT_EXPR:
2853 case PREINCREMENT_EXPR:
2854 case POSTDECREMENT_EXPR:
2855 case POSTINCREMENT_EXPR:
2856 /* All of these have side-effects, no matter what their
2857 operands are. */
2858 return;
2860 default:
2861 break;
2863 /* Fall through. */
2865 case tcc_comparison: /* a comparison expression */
2866 case tcc_unary: /* a unary arithmetic expression */
2867 case tcc_binary: /* a binary arithmetic expression */
2868 case tcc_reference: /* a reference */
2869 case tcc_vl_exp: /* a function call */
2870 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2871 for (i = 0; i < len; ++i)
2873 tree op = TREE_OPERAND (t, i);
2874 if (op && TREE_SIDE_EFFECTS (op))
2875 TREE_SIDE_EFFECTS (t) = 1;
2877 break;
2879 case tcc_constant:
2880 /* No side-effects. */
2881 return;
2883 default:
2884 gcc_unreachable ();
2888 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2889 node *EXPR_P.
2891 compound_lval
2892 : min_lval '[' val ']'
2893 | min_lval '.' ID
2894 | compound_lval '[' val ']'
2895 | compound_lval '.' ID
2897 This is not part of the original SIMPLE definition, which separates
2898 array and member references, but it seems reasonable to handle them
2899 together. Also, this way we don't run into problems with union
2900 aliasing; gcc requires that for accesses through a union to alias, the
2901 union reference must be explicit, which was not always the case when we
2902 were splitting up array and member refs.
2904 PRE_P points to the sequence where side effects that must happen before
2905 *EXPR_P should be stored.
2907 POST_P points to the sequence where side effects that must happen after
2908 *EXPR_P should be stored. */
2910 static enum gimplify_status
2911 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2912 fallback_t fallback)
2914 tree *p;
2915 enum gimplify_status ret = GS_ALL_DONE, tret;
2916 int i;
2917 location_t loc = EXPR_LOCATION (*expr_p);
2918 tree expr = *expr_p;
2920 /* Create a stack of the subexpressions so later we can walk them in
2921 order from inner to outer. */
2922 auto_vec<tree, 10> expr_stack;
2924 /* We can handle anything that get_inner_reference can deal with. */
2925 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2927 restart:
2928 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2929 if (TREE_CODE (*p) == INDIRECT_REF)
2930 *p = fold_indirect_ref_loc (loc, *p);
2932 if (handled_component_p (*p))
2934 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2935 additional COMPONENT_REFs. */
2936 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2937 && gimplify_var_or_parm_decl (p) == GS_OK)
2938 goto restart;
2939 else
2940 break;
2942 expr_stack.safe_push (*p);
2945 gcc_assert (expr_stack.length ());
2947 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2948 walked through and P points to the innermost expression.
2950 Java requires that we elaborated nodes in source order. That
2951 means we must gimplify the inner expression followed by each of
2952 the indices, in order. But we can't gimplify the inner
2953 expression until we deal with any variable bounds, sizes, or
2954 positions in order to deal with PLACEHOLDER_EXPRs.
2956 So we do this in three steps. First we deal with the annotations
2957 for any variables in the components, then we gimplify the base,
2958 then we gimplify any indices, from left to right. */
2959 for (i = expr_stack.length () - 1; i >= 0; i--)
2961 tree t = expr_stack[i];
2963 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2965 /* Gimplify the low bound and element type size and put them into
2966 the ARRAY_REF. If these values are set, they have already been
2967 gimplified. */
2968 if (TREE_OPERAND (t, 2) == NULL_TREE)
2970 tree low = unshare_expr (array_ref_low_bound (t));
2971 if (!is_gimple_min_invariant (low))
2973 TREE_OPERAND (t, 2) = low;
2974 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2975 post_p, is_gimple_reg,
2976 fb_rvalue);
2977 ret = MIN (ret, tret);
2980 else
2982 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2983 is_gimple_reg, fb_rvalue);
2984 ret = MIN (ret, tret);
2987 if (TREE_OPERAND (t, 3) == NULL_TREE)
2989 tree elmt_size = array_ref_element_size (t);
2990 if (!is_gimple_min_invariant (elmt_size))
2992 elmt_size = unshare_expr (elmt_size);
2993 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2994 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2996 /* Divide the element size by the alignment of the element
2997 type (above). */
2998 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
2999 elmt_size, factor);
3001 TREE_OPERAND (t, 3) = elmt_size;
3002 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3003 post_p, is_gimple_reg,
3004 fb_rvalue);
3005 ret = MIN (ret, tret);
3008 else
3010 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3011 is_gimple_reg, fb_rvalue);
3012 ret = MIN (ret, tret);
3015 else if (TREE_CODE (t) == COMPONENT_REF)
3017 /* Set the field offset into T and gimplify it. */
3018 if (TREE_OPERAND (t, 2) == NULL_TREE)
3020 tree offset = component_ref_field_offset (t);
3021 if (!is_gimple_min_invariant (offset))
3023 offset = unshare_expr (offset);
3024 tree field = TREE_OPERAND (t, 1);
3025 tree factor
3026 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3028 /* Divide the offset by its alignment. */
3029 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3030 offset, factor);
3032 TREE_OPERAND (t, 2) = offset;
3033 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3034 post_p, is_gimple_reg,
3035 fb_rvalue);
3036 ret = MIN (ret, tret);
3039 else
3041 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3042 is_gimple_reg, fb_rvalue);
3043 ret = MIN (ret, tret);
3048 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3049 so as to match the min_lval predicate. Failure to do so may result
3050 in the creation of large aggregate temporaries. */
3051 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3052 fallback | fb_lvalue);
3053 ret = MIN (ret, tret);
3055 /* And finally, the indices and operands of ARRAY_REF. During this
3056 loop we also remove any useless conversions. */
3057 for (; expr_stack.length () > 0; )
3059 tree t = expr_stack.pop ();
3061 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3063 /* Gimplify the dimension. */
3064 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3066 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3067 is_gimple_val, fb_rvalue);
3068 ret = MIN (ret, tret);
3072 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3074 /* The innermost expression P may have originally had
3075 TREE_SIDE_EFFECTS set which would have caused all the outer
3076 expressions in *EXPR_P leading to P to also have had
3077 TREE_SIDE_EFFECTS set. */
3078 recalculate_side_effects (t);
3081 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3082 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3084 canonicalize_component_ref (expr_p);
3087 expr_stack.release ();
3089 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3091 return ret;
3094 /* Gimplify the self modifying expression pointed to by EXPR_P
3095 (++, --, +=, -=).
3097 PRE_P points to the list where side effects that must happen before
3098 *EXPR_P should be stored.
3100 POST_P points to the list where side effects that must happen after
3101 *EXPR_P should be stored.
3103 WANT_VALUE is nonzero iff we want to use the value of this expression
3104 in another expression.
3106 ARITH_TYPE is the type the computation should be performed in. */
3108 enum gimplify_status
3109 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3110 bool want_value, tree arith_type)
3112 enum tree_code code;
3113 tree lhs, lvalue, rhs, t1;
3114 gimple_seq post = NULL, *orig_post_p = post_p;
3115 bool postfix;
3116 enum tree_code arith_code;
3117 enum gimplify_status ret;
3118 location_t loc = EXPR_LOCATION (*expr_p);
3120 code = TREE_CODE (*expr_p);
3122 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3123 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3125 /* Prefix or postfix? */
3126 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3127 /* Faster to treat as prefix if result is not used. */
3128 postfix = want_value;
3129 else
3130 postfix = false;
3132 /* For postfix, make sure the inner expression's post side effects
3133 are executed after side effects from this expression. */
3134 if (postfix)
3135 post_p = &post;
3137 /* Add or subtract? */
3138 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3139 arith_code = PLUS_EXPR;
3140 else
3141 arith_code = MINUS_EXPR;
3143 /* Gimplify the LHS into a GIMPLE lvalue. */
3144 lvalue = TREE_OPERAND (*expr_p, 0);
3145 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3146 if (ret == GS_ERROR)
3147 return ret;
3149 /* Extract the operands to the arithmetic operation. */
3150 lhs = lvalue;
3151 rhs = TREE_OPERAND (*expr_p, 1);
3153 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3154 that as the result value and in the postqueue operation. */
3155 if (postfix)
3157 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3158 if (ret == GS_ERROR)
3159 return ret;
3161 lhs = get_initialized_tmp_var (lhs, pre_p);
3164 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3165 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3167 rhs = convert_to_ptrofftype_loc (loc, rhs);
3168 if (arith_code == MINUS_EXPR)
3169 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3170 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3172 else
3173 t1 = fold_convert (TREE_TYPE (*expr_p),
3174 fold_build2 (arith_code, arith_type,
3175 fold_convert (arith_type, lhs),
3176 fold_convert (arith_type, rhs)));
3178 if (postfix)
3180 gimplify_assign (lvalue, t1, pre_p);
3181 gimplify_seq_add_seq (orig_post_p, post);
3182 *expr_p = lhs;
3183 return GS_ALL_DONE;
3185 else
3187 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3188 return GS_OK;
3192 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3194 static void
3195 maybe_with_size_expr (tree *expr_p)
3197 tree expr = *expr_p;
3198 tree type = TREE_TYPE (expr);
3199 tree size;
3201 /* If we've already wrapped this or the type is error_mark_node, we can't do
3202 anything. */
3203 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3204 || type == error_mark_node)
3205 return;
3207 /* If the size isn't known or is a constant, we have nothing to do. */
3208 size = TYPE_SIZE_UNIT (type);
3209 if (!size || poly_int_tree_p (size))
3210 return;
3212 /* Otherwise, make a WITH_SIZE_EXPR. */
3213 size = unshare_expr (size);
3214 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3215 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3218 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3219 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3220 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3221 gimplified to an SSA name. */
3223 enum gimplify_status
3224 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3225 bool allow_ssa)
3227 bool (*test) (tree);
3228 fallback_t fb;
3230 /* In general, we allow lvalues for function arguments to avoid
3231 extra overhead of copying large aggregates out of even larger
3232 aggregates into temporaries only to copy the temporaries to
3233 the argument list. Make optimizers happy by pulling out to
3234 temporaries those types that fit in registers. */
3235 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3236 test = is_gimple_val, fb = fb_rvalue;
3237 else
3239 test = is_gimple_lvalue, fb = fb_either;
3240 /* Also strip a TARGET_EXPR that would force an extra copy. */
3241 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3243 tree init = TARGET_EXPR_INITIAL (*arg_p);
3244 if (init
3245 && !VOID_TYPE_P (TREE_TYPE (init)))
3246 *arg_p = init;
3250 /* If this is a variable sized type, we must remember the size. */
3251 maybe_with_size_expr (arg_p);
3253 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3254 /* Make sure arguments have the same location as the function call
3255 itself. */
3256 protected_set_expr_location (*arg_p, call_location);
3258 /* There is a sequence point before a function call. Side effects in
3259 the argument list must occur before the actual call. So, when
3260 gimplifying arguments, force gimplify_expr to use an internal
3261 post queue which is then appended to the end of PRE_P. */
3262 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3265 /* Don't fold inside offloading or taskreg regions: it can break code by
3266 adding decl references that weren't in the source. We'll do it during
3267 omplower pass instead. */
3269 static bool
3270 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3272 struct gimplify_omp_ctx *ctx;
3273 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3274 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3275 return false;
3276 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3277 return false;
3278 /* Delay folding of builtins until the IL is in consistent state
3279 so the diagnostic machinery can do a better job. */
3280 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3281 return false;
3282 return fold_stmt (gsi);
3285 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3286 WANT_VALUE is true if the result of the call is desired. */
3288 static enum gimplify_status
3289 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3291 tree fndecl, parms, p, fnptrtype;
3292 enum gimplify_status ret;
3293 int i, nargs;
3294 gcall *call;
3295 bool builtin_va_start_p = false;
3296 location_t loc = EXPR_LOCATION (*expr_p);
3298 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3300 /* For reliable diagnostics during inlining, it is necessary that
3301 every call_expr be annotated with file and line. */
3302 if (! EXPR_HAS_LOCATION (*expr_p))
3303 SET_EXPR_LOCATION (*expr_p, input_location);
3305 /* Gimplify internal functions created in the FEs. */
3306 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3308 if (want_value)
3309 return GS_ALL_DONE;
3311 nargs = call_expr_nargs (*expr_p);
3312 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3313 auto_vec<tree> vargs (nargs);
3315 for (i = 0; i < nargs; i++)
3317 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3318 EXPR_LOCATION (*expr_p));
3319 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3322 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3323 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3324 gimplify_seq_add_stmt (pre_p, call);
3325 return GS_ALL_DONE;
3328 /* This may be a call to a builtin function.
3330 Builtin function calls may be transformed into different
3331 (and more efficient) builtin function calls under certain
3332 circumstances. Unfortunately, gimplification can muck things
3333 up enough that the builtin expanders are not aware that certain
3334 transformations are still valid.
3336 So we attempt transformation/gimplification of the call before
3337 we gimplify the CALL_EXPR. At this time we do not manage to
3338 transform all calls in the same manner as the expanders do, but
3339 we do transform most of them. */
3340 fndecl = get_callee_fndecl (*expr_p);
3341 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3342 switch (DECL_FUNCTION_CODE (fndecl))
3344 CASE_BUILT_IN_ALLOCA:
3345 /* If the call has been built for a variable-sized object, then we
3346 want to restore the stack level when the enclosing BIND_EXPR is
3347 exited to reclaim the allocated space; otherwise, we precisely
3348 need to do the opposite and preserve the latest stack level. */
3349 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3350 gimplify_ctxp->save_stack = true;
3351 else
3352 gimplify_ctxp->keep_stack = true;
3353 break;
3355 case BUILT_IN_VA_START:
3357 builtin_va_start_p = TRUE;
3358 if (call_expr_nargs (*expr_p) < 2)
3360 error ("too few arguments to function %<va_start%>");
3361 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3362 return GS_OK;
3365 if (fold_builtin_next_arg (*expr_p, true))
3367 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3368 return GS_OK;
3370 break;
3373 case BUILT_IN_EH_RETURN:
3374 cfun->calls_eh_return = true;
3375 break;
3377 default:
3380 if (fndecl && fndecl_built_in_p (fndecl))
3382 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3383 if (new_tree && new_tree != *expr_p)
3385 /* There was a transformation of this call which computes the
3386 same value, but in a more efficient way. Return and try
3387 again. */
3388 *expr_p = new_tree;
3389 return GS_OK;
3393 /* Remember the original function pointer type. */
3394 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3396 if (flag_openmp
3397 && fndecl
3398 && cfun
3399 && (cfun->curr_properties & PROP_gimple_any) == 0)
3401 tree variant = omp_resolve_declare_variant (fndecl);
3402 if (variant != fndecl)
3403 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3406 /* There is a sequence point before the call, so any side effects in
3407 the calling expression must occur before the actual call. Force
3408 gimplify_expr to use an internal post queue. */
3409 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3410 is_gimple_call_addr, fb_rvalue);
3412 nargs = call_expr_nargs (*expr_p);
3414 /* Get argument types for verification. */
3415 fndecl = get_callee_fndecl (*expr_p);
3416 parms = NULL_TREE;
3417 if (fndecl)
3418 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3419 else
3420 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3422 if (fndecl && DECL_ARGUMENTS (fndecl))
3423 p = DECL_ARGUMENTS (fndecl);
3424 else if (parms)
3425 p = parms;
3426 else
3427 p = NULL_TREE;
3428 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3431 /* If the last argument is __builtin_va_arg_pack () and it is not
3432 passed as a named argument, decrease the number of CALL_EXPR
3433 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3434 if (!p
3435 && i < nargs
3436 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3438 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3439 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3441 if (last_arg_fndecl
3442 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3444 tree call = *expr_p;
3446 --nargs;
3447 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3448 CALL_EXPR_FN (call),
3449 nargs, CALL_EXPR_ARGP (call));
3451 /* Copy all CALL_EXPR flags, location and block, except
3452 CALL_EXPR_VA_ARG_PACK flag. */
3453 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3454 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3455 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3456 = CALL_EXPR_RETURN_SLOT_OPT (call);
3457 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3458 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3460 /* Set CALL_EXPR_VA_ARG_PACK. */
3461 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3465 /* If the call returns twice then after building the CFG the call
3466 argument computations will no longer dominate the call because
3467 we add an abnormal incoming edge to the call. So do not use SSA
3468 vars there. */
3469 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3471 /* Gimplify the function arguments. */
3472 if (nargs > 0)
3474 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3475 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3476 PUSH_ARGS_REVERSED ? i-- : i++)
3478 enum gimplify_status t;
3480 /* Avoid gimplifying the second argument to va_start, which needs to
3481 be the plain PARM_DECL. */
3482 if ((i != 1) || !builtin_va_start_p)
3484 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3485 EXPR_LOCATION (*expr_p), ! returns_twice);
3487 if (t == GS_ERROR)
3488 ret = GS_ERROR;
3493 /* Gimplify the static chain. */
3494 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3496 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3497 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3498 else
3500 enum gimplify_status t;
3501 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3502 EXPR_LOCATION (*expr_p), ! returns_twice);
3503 if (t == GS_ERROR)
3504 ret = GS_ERROR;
3508 /* Verify the function result. */
3509 if (want_value && fndecl
3510 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3512 error_at (loc, "using result of function returning %<void%>");
3513 ret = GS_ERROR;
3516 /* Try this again in case gimplification exposed something. */
3517 if (ret != GS_ERROR)
3519 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3521 if (new_tree && new_tree != *expr_p)
3523 /* There was a transformation of this call which computes the
3524 same value, but in a more efficient way. Return and try
3525 again. */
3526 *expr_p = new_tree;
3527 return GS_OK;
3530 else
3532 *expr_p = error_mark_node;
3533 return GS_ERROR;
3536 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3537 decl. This allows us to eliminate redundant or useless
3538 calls to "const" functions. */
3539 if (TREE_CODE (*expr_p) == CALL_EXPR)
3541 int flags = call_expr_flags (*expr_p);
3542 if (flags & (ECF_CONST | ECF_PURE)
3543 /* An infinite loop is considered a side effect. */
3544 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3545 TREE_SIDE_EFFECTS (*expr_p) = 0;
3548 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3549 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3550 form and delegate the creation of a GIMPLE_CALL to
3551 gimplify_modify_expr. This is always possible because when
3552 WANT_VALUE is true, the caller wants the result of this call into
3553 a temporary, which means that we will emit an INIT_EXPR in
3554 internal_get_tmp_var which will then be handled by
3555 gimplify_modify_expr. */
3556 if (!want_value)
3558 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3559 have to do is replicate it as a GIMPLE_CALL tuple. */
3560 gimple_stmt_iterator gsi;
3561 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3562 notice_special_calls (call);
3563 gimplify_seq_add_stmt (pre_p, call);
3564 gsi = gsi_last (*pre_p);
3565 maybe_fold_stmt (&gsi);
3566 *expr_p = NULL_TREE;
3568 else
3569 /* Remember the original function type. */
3570 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3571 CALL_EXPR_FN (*expr_p));
3573 return ret;
3576 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3577 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3579 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3580 condition is true or false, respectively. If null, we should generate
3581 our own to skip over the evaluation of this specific expression.
3583 LOCUS is the source location of the COND_EXPR.
3585 This function is the tree equivalent of do_jump.
3587 shortcut_cond_r should only be called by shortcut_cond_expr. */
3589 static tree
3590 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3591 location_t locus)
3593 tree local_label = NULL_TREE;
3594 tree t, expr = NULL;
3596 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3597 retain the shortcut semantics. Just insert the gotos here;
3598 shortcut_cond_expr will append the real blocks later. */
3599 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3601 location_t new_locus;
3603 /* Turn if (a && b) into
3605 if (a); else goto no;
3606 if (b) goto yes; else goto no;
3607 (no:) */
3609 if (false_label_p == NULL)
3610 false_label_p = &local_label;
3612 /* Keep the original source location on the first 'if'. */
3613 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3614 append_to_statement_list (t, &expr);
3616 /* Set the source location of the && on the second 'if'. */
3617 new_locus = rexpr_location (pred, locus);
3618 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3619 new_locus);
3620 append_to_statement_list (t, &expr);
3622 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3624 location_t new_locus;
3626 /* Turn if (a || b) into
3628 if (a) goto yes;
3629 if (b) goto yes; else goto no;
3630 (yes:) */
3632 if (true_label_p == NULL)
3633 true_label_p = &local_label;
3635 /* Keep the original source location on the first 'if'. */
3636 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3637 append_to_statement_list (t, &expr);
3639 /* Set the source location of the || on the second 'if'. */
3640 new_locus = rexpr_location (pred, locus);
3641 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3642 new_locus);
3643 append_to_statement_list (t, &expr);
3645 else if (TREE_CODE (pred) == COND_EXPR
3646 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3647 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3649 location_t new_locus;
3651 /* As long as we're messing with gotos, turn if (a ? b : c) into
3652 if (a)
3653 if (b) goto yes; else goto no;
3654 else
3655 if (c) goto yes; else goto no;
3657 Don't do this if one of the arms has void type, which can happen
3658 in C++ when the arm is throw. */
3660 /* Keep the original source location on the first 'if'. Set the source
3661 location of the ? on the second 'if'. */
3662 new_locus = rexpr_location (pred, locus);
3663 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3664 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3665 false_label_p, locus),
3666 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3667 false_label_p, new_locus));
3669 else
3671 expr = build3 (COND_EXPR, void_type_node, pred,
3672 build_and_jump (true_label_p),
3673 build_and_jump (false_label_p));
3674 SET_EXPR_LOCATION (expr, locus);
3677 if (local_label)
3679 t = build1 (LABEL_EXPR, void_type_node, local_label);
3680 append_to_statement_list (t, &expr);
3683 return expr;
3686 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3687 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3688 statement, if it is the last one. Otherwise, return NULL. */
3690 static tree
3691 find_goto (tree expr)
3693 if (!expr)
3694 return NULL_TREE;
3696 if (TREE_CODE (expr) == GOTO_EXPR)
3697 return expr;
3699 if (TREE_CODE (expr) != STATEMENT_LIST)
3700 return NULL_TREE;
3702 tree_stmt_iterator i = tsi_start (expr);
3704 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3705 tsi_next (&i);
3707 if (!tsi_one_before_end_p (i))
3708 return NULL_TREE;
3710 return find_goto (tsi_stmt (i));
3713 /* Same as find_goto, except that it returns NULL if the destination
3714 is not a LABEL_DECL. */
3716 static inline tree
3717 find_goto_label (tree expr)
3719 tree dest = find_goto (expr);
3720 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3721 return dest;
3722 return NULL_TREE;
3725 /* Given a conditional expression EXPR with short-circuit boolean
3726 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3727 predicate apart into the equivalent sequence of conditionals. */
3729 static tree
3730 shortcut_cond_expr (tree expr)
3732 tree pred = TREE_OPERAND (expr, 0);
3733 tree then_ = TREE_OPERAND (expr, 1);
3734 tree else_ = TREE_OPERAND (expr, 2);
3735 tree true_label, false_label, end_label, t;
3736 tree *true_label_p;
3737 tree *false_label_p;
3738 bool emit_end, emit_false, jump_over_else;
3739 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3740 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3742 /* First do simple transformations. */
3743 if (!else_se)
3745 /* If there is no 'else', turn
3746 if (a && b) then c
3747 into
3748 if (a) if (b) then c. */
3749 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3751 /* Keep the original source location on the first 'if'. */
3752 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3753 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3754 /* Set the source location of the && on the second 'if'. */
3755 if (rexpr_has_location (pred))
3756 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3757 then_ = shortcut_cond_expr (expr);
3758 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3759 pred = TREE_OPERAND (pred, 0);
3760 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3761 SET_EXPR_LOCATION (expr, locus);
3765 if (!then_se)
3767 /* If there is no 'then', turn
3768 if (a || b); else d
3769 into
3770 if (a); else if (b); else d. */
3771 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3773 /* Keep the original source location on the first 'if'. */
3774 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3775 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3776 /* Set the source location of the || on the second 'if'. */
3777 if (rexpr_has_location (pred))
3778 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3779 else_ = shortcut_cond_expr (expr);
3780 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3781 pred = TREE_OPERAND (pred, 0);
3782 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3783 SET_EXPR_LOCATION (expr, locus);
3787 /* If we're done, great. */
3788 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3789 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3790 return expr;
3792 /* Otherwise we need to mess with gotos. Change
3793 if (a) c; else d;
3795 if (a); else goto no;
3796 c; goto end;
3797 no: d; end:
3798 and recursively gimplify the condition. */
3800 true_label = false_label = end_label = NULL_TREE;
3802 /* If our arms just jump somewhere, hijack those labels so we don't
3803 generate jumps to jumps. */
3805 if (tree then_goto = find_goto_label (then_))
3807 true_label = GOTO_DESTINATION (then_goto);
3808 then_ = NULL;
3809 then_se = false;
3812 if (tree else_goto = find_goto_label (else_))
3814 false_label = GOTO_DESTINATION (else_goto);
3815 else_ = NULL;
3816 else_se = false;
3819 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3820 if (true_label)
3821 true_label_p = &true_label;
3822 else
3823 true_label_p = NULL;
3825 /* The 'else' branch also needs a label if it contains interesting code. */
3826 if (false_label || else_se)
3827 false_label_p = &false_label;
3828 else
3829 false_label_p = NULL;
3831 /* If there was nothing else in our arms, just forward the label(s). */
3832 if (!then_se && !else_se)
3833 return shortcut_cond_r (pred, true_label_p, false_label_p,
3834 EXPR_LOC_OR_LOC (expr, input_location));
3836 /* If our last subexpression already has a terminal label, reuse it. */
3837 if (else_se)
3838 t = expr_last (else_);
3839 else if (then_se)
3840 t = expr_last (then_);
3841 else
3842 t = NULL;
3843 if (t && TREE_CODE (t) == LABEL_EXPR)
3844 end_label = LABEL_EXPR_LABEL (t);
3846 /* If we don't care about jumping to the 'else' branch, jump to the end
3847 if the condition is false. */
3848 if (!false_label_p)
3849 false_label_p = &end_label;
3851 /* We only want to emit these labels if we aren't hijacking them. */
3852 emit_end = (end_label == NULL_TREE);
3853 emit_false = (false_label == NULL_TREE);
3855 /* We only emit the jump over the else clause if we have to--if the
3856 then clause may fall through. Otherwise we can wind up with a
3857 useless jump and a useless label at the end of gimplified code,
3858 which will cause us to think that this conditional as a whole
3859 falls through even if it doesn't. If we then inline a function
3860 which ends with such a condition, that can cause us to issue an
3861 inappropriate warning about control reaching the end of a
3862 non-void function. */
3863 jump_over_else = block_may_fallthru (then_);
3865 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3866 EXPR_LOC_OR_LOC (expr, input_location));
3868 expr = NULL;
3869 append_to_statement_list (pred, &expr);
3871 append_to_statement_list (then_, &expr);
3872 if (else_se)
3874 if (jump_over_else)
3876 tree last = expr_last (expr);
3877 t = build_and_jump (&end_label);
3878 if (rexpr_has_location (last))
3879 SET_EXPR_LOCATION (t, rexpr_location (last));
3880 append_to_statement_list (t, &expr);
3882 if (emit_false)
3884 t = build1 (LABEL_EXPR, void_type_node, false_label);
3885 append_to_statement_list (t, &expr);
3887 append_to_statement_list (else_, &expr);
3889 if (emit_end && end_label)
3891 t = build1 (LABEL_EXPR, void_type_node, end_label);
3892 append_to_statement_list (t, &expr);
3895 return expr;
3898 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3900 tree
3901 gimple_boolify (tree expr)
3903 tree type = TREE_TYPE (expr);
3904 location_t loc = EXPR_LOCATION (expr);
3906 if (TREE_CODE (expr) == NE_EXPR
3907 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3908 && integer_zerop (TREE_OPERAND (expr, 1)))
3910 tree call = TREE_OPERAND (expr, 0);
3911 tree fn = get_callee_fndecl (call);
3913 /* For __builtin_expect ((long) (x), y) recurse into x as well
3914 if x is truth_value_p. */
3915 if (fn
3916 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3917 && call_expr_nargs (call) == 2)
3919 tree arg = CALL_EXPR_ARG (call, 0);
3920 if (arg)
3922 if (TREE_CODE (arg) == NOP_EXPR
3923 && TREE_TYPE (arg) == TREE_TYPE (call))
3924 arg = TREE_OPERAND (arg, 0);
3925 if (truth_value_p (TREE_CODE (arg)))
3927 arg = gimple_boolify (arg);
3928 CALL_EXPR_ARG (call, 0)
3929 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3935 switch (TREE_CODE (expr))
3937 case TRUTH_AND_EXPR:
3938 case TRUTH_OR_EXPR:
3939 case TRUTH_XOR_EXPR:
3940 case TRUTH_ANDIF_EXPR:
3941 case TRUTH_ORIF_EXPR:
3942 /* Also boolify the arguments of truth exprs. */
3943 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3944 /* FALLTHRU */
3946 case TRUTH_NOT_EXPR:
3947 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3949 /* These expressions always produce boolean results. */
3950 if (TREE_CODE (type) != BOOLEAN_TYPE)
3951 TREE_TYPE (expr) = boolean_type_node;
3952 return expr;
3954 case ANNOTATE_EXPR:
3955 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3957 case annot_expr_ivdep_kind:
3958 case annot_expr_unroll_kind:
3959 case annot_expr_no_vector_kind:
3960 case annot_expr_vector_kind:
3961 case annot_expr_parallel_kind:
3962 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3963 if (TREE_CODE (type) != BOOLEAN_TYPE)
3964 TREE_TYPE (expr) = boolean_type_node;
3965 return expr;
3966 default:
3967 gcc_unreachable ();
3970 default:
3971 if (COMPARISON_CLASS_P (expr))
3973 /* There expressions always prduce boolean results. */
3974 if (TREE_CODE (type) != BOOLEAN_TYPE)
3975 TREE_TYPE (expr) = boolean_type_node;
3976 return expr;
3978 /* Other expressions that get here must have boolean values, but
3979 might need to be converted to the appropriate mode. */
3980 if (TREE_CODE (type) == BOOLEAN_TYPE)
3981 return expr;
3982 return fold_convert_loc (loc, boolean_type_node, expr);
3986 /* Given a conditional expression *EXPR_P without side effects, gimplify
3987 its operands. New statements are inserted to PRE_P. */
3989 static enum gimplify_status
3990 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3992 tree expr = *expr_p, cond;
3993 enum gimplify_status ret, tret;
3994 enum tree_code code;
3996 cond = gimple_boolify (COND_EXPR_COND (expr));
3998 /* We need to handle && and || specially, as their gimplification
3999 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4000 code = TREE_CODE (cond);
4001 if (code == TRUTH_ANDIF_EXPR)
4002 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4003 else if (code == TRUTH_ORIF_EXPR)
4004 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4005 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4006 COND_EXPR_COND (*expr_p) = cond;
4008 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4009 is_gimple_val, fb_rvalue);
4010 ret = MIN (ret, tret);
4011 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4012 is_gimple_val, fb_rvalue);
4014 return MIN (ret, tret);
4017 /* Return true if evaluating EXPR could trap.
4018 EXPR is GENERIC, while tree_could_trap_p can be called
4019 only on GIMPLE. */
4021 bool
4022 generic_expr_could_trap_p (tree expr)
4024 unsigned i, n;
4026 if (!expr || is_gimple_val (expr))
4027 return false;
4029 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4030 return true;
4032 n = TREE_OPERAND_LENGTH (expr);
4033 for (i = 0; i < n; i++)
4034 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4035 return true;
4037 return false;
4040 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4041 into
4043 if (p) if (p)
4044 t1 = a; a;
4045 else or else
4046 t1 = b; b;
4049 The second form is used when *EXPR_P is of type void.
4051 PRE_P points to the list where side effects that must happen before
4052 *EXPR_P should be stored. */
4054 static enum gimplify_status
4055 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4057 tree expr = *expr_p;
4058 tree type = TREE_TYPE (expr);
4059 location_t loc = EXPR_LOCATION (expr);
4060 tree tmp, arm1, arm2;
4061 enum gimplify_status ret;
4062 tree label_true, label_false, label_cont;
4063 bool have_then_clause_p, have_else_clause_p;
4064 gcond *cond_stmt;
4065 enum tree_code pred_code;
4066 gimple_seq seq = NULL;
4068 /* If this COND_EXPR has a value, copy the values into a temporary within
4069 the arms. */
4070 if (!VOID_TYPE_P (type))
4072 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4073 tree result;
4075 /* If either an rvalue is ok or we do not require an lvalue, create the
4076 temporary. But we cannot do that if the type is addressable. */
4077 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4078 && !TREE_ADDRESSABLE (type))
4080 if (gimplify_ctxp->allow_rhs_cond_expr
4081 /* If either branch has side effects or could trap, it can't be
4082 evaluated unconditionally. */
4083 && !TREE_SIDE_EFFECTS (then_)
4084 && !generic_expr_could_trap_p (then_)
4085 && !TREE_SIDE_EFFECTS (else_)
4086 && !generic_expr_could_trap_p (else_))
4087 return gimplify_pure_cond_expr (expr_p, pre_p);
4089 tmp = create_tmp_var (type, "iftmp");
4090 result = tmp;
4093 /* Otherwise, only create and copy references to the values. */
4094 else
4096 type = build_pointer_type (type);
4098 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4099 then_ = build_fold_addr_expr_loc (loc, then_);
4101 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4102 else_ = build_fold_addr_expr_loc (loc, else_);
4104 expr
4105 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4107 tmp = create_tmp_var (type, "iftmp");
4108 result = build_simple_mem_ref_loc (loc, tmp);
4111 /* Build the new then clause, `tmp = then_;'. But don't build the
4112 assignment if the value is void; in C++ it can be if it's a throw. */
4113 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4114 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4116 /* Similarly, build the new else clause, `tmp = else_;'. */
4117 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4118 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4120 TREE_TYPE (expr) = void_type_node;
4121 recalculate_side_effects (expr);
4123 /* Move the COND_EXPR to the prequeue. */
4124 gimplify_stmt (&expr, pre_p);
4126 *expr_p = result;
4127 return GS_ALL_DONE;
4130 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4131 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4132 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4133 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4135 /* Make sure the condition has BOOLEAN_TYPE. */
4136 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4138 /* Break apart && and || conditions. */
4139 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4140 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4142 expr = shortcut_cond_expr (expr);
4144 if (expr != *expr_p)
4146 *expr_p = expr;
4148 /* We can't rely on gimplify_expr to re-gimplify the expanded
4149 form properly, as cleanups might cause the target labels to be
4150 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4151 set up a conditional context. */
4152 gimple_push_condition ();
4153 gimplify_stmt (expr_p, &seq);
4154 gimple_pop_condition (pre_p);
4155 gimple_seq_add_seq (pre_p, seq);
4157 return GS_ALL_DONE;
4161 /* Now do the normal gimplification. */
4163 /* Gimplify condition. */
4164 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4165 is_gimple_condexpr_for_cond, fb_rvalue);
4166 if (ret == GS_ERROR)
4167 return GS_ERROR;
4168 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4170 gimple_push_condition ();
4172 have_then_clause_p = have_else_clause_p = false;
4173 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4174 if (label_true
4175 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4176 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4177 have different locations, otherwise we end up with incorrect
4178 location information on the branches. */
4179 && (optimize
4180 || !EXPR_HAS_LOCATION (expr)
4181 || !rexpr_has_location (label_true)
4182 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4184 have_then_clause_p = true;
4185 label_true = GOTO_DESTINATION (label_true);
4187 else
4188 label_true = create_artificial_label (UNKNOWN_LOCATION);
4189 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4190 if (label_false
4191 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4192 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4193 have different locations, otherwise we end up with incorrect
4194 location information on the branches. */
4195 && (optimize
4196 || !EXPR_HAS_LOCATION (expr)
4197 || !rexpr_has_location (label_false)
4198 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4200 have_else_clause_p = true;
4201 label_false = GOTO_DESTINATION (label_false);
4203 else
4204 label_false = create_artificial_label (UNKNOWN_LOCATION);
4206 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4207 &arm2);
4208 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4209 label_false);
4210 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4211 gimplify_seq_add_stmt (&seq, cond_stmt);
4212 gimple_stmt_iterator gsi = gsi_last (seq);
4213 maybe_fold_stmt (&gsi);
4215 label_cont = NULL_TREE;
4216 if (!have_then_clause_p)
4218 /* For if (...) {} else { code; } put label_true after
4219 the else block. */
4220 if (TREE_OPERAND (expr, 1) == NULL_TREE
4221 && !have_else_clause_p
4222 && TREE_OPERAND (expr, 2) != NULL_TREE)
4223 label_cont = label_true;
4224 else
4226 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4227 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4228 /* For if (...) { code; } else {} or
4229 if (...) { code; } else goto label; or
4230 if (...) { code; return; } else { ... }
4231 label_cont isn't needed. */
4232 if (!have_else_clause_p
4233 && TREE_OPERAND (expr, 2) != NULL_TREE
4234 && gimple_seq_may_fallthru (seq))
4236 gimple *g;
4237 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4239 g = gimple_build_goto (label_cont);
4241 /* GIMPLE_COND's are very low level; they have embedded
4242 gotos. This particular embedded goto should not be marked
4243 with the location of the original COND_EXPR, as it would
4244 correspond to the COND_EXPR's condition, not the ELSE or the
4245 THEN arms. To avoid marking it with the wrong location, flag
4246 it as "no location". */
4247 gimple_set_do_not_emit_location (g);
4249 gimplify_seq_add_stmt (&seq, g);
4253 if (!have_else_clause_p)
4255 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4256 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4258 if (label_cont)
4259 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4261 gimple_pop_condition (pre_p);
4262 gimple_seq_add_seq (pre_p, seq);
4264 if (ret == GS_ERROR)
4265 ; /* Do nothing. */
4266 else if (have_then_clause_p || have_else_clause_p)
4267 ret = GS_ALL_DONE;
4268 else
4270 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4271 expr = TREE_OPERAND (expr, 0);
4272 gimplify_stmt (&expr, pre_p);
4275 *expr_p = NULL;
4276 return ret;
4279 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4280 to be marked addressable.
4282 We cannot rely on such an expression being directly markable if a temporary
4283 has been created by the gimplification. In this case, we create another
4284 temporary and initialize it with a copy, which will become a store after we
4285 mark it addressable. This can happen if the front-end passed us something
4286 that it could not mark addressable yet, like a Fortran pass-by-reference
4287 parameter (int) floatvar. */
4289 static void
4290 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4292 while (handled_component_p (*expr_p))
4293 expr_p = &TREE_OPERAND (*expr_p, 0);
4294 if (is_gimple_reg (*expr_p))
4296 /* Do not allow an SSA name as the temporary. */
4297 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4298 DECL_NOT_GIMPLE_REG_P (var) = 1;
4299 *expr_p = var;
4303 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4304 a call to __builtin_memcpy. */
4306 static enum gimplify_status
4307 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4308 gimple_seq *seq_p)
4310 tree t, to, to_ptr, from, from_ptr;
4311 gcall *gs;
4312 location_t loc = EXPR_LOCATION (*expr_p);
4314 to = TREE_OPERAND (*expr_p, 0);
4315 from = TREE_OPERAND (*expr_p, 1);
4317 /* Mark the RHS addressable. Beware that it may not be possible to do so
4318 directly if a temporary has been created by the gimplification. */
4319 prepare_gimple_addressable (&from, seq_p);
4321 mark_addressable (from);
4322 from_ptr = build_fold_addr_expr_loc (loc, from);
4323 gimplify_arg (&from_ptr, seq_p, loc);
4325 mark_addressable (to);
4326 to_ptr = build_fold_addr_expr_loc (loc, to);
4327 gimplify_arg (&to_ptr, seq_p, loc);
4329 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4331 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4333 if (want_value)
4335 /* tmp = memcpy() */
4336 t = create_tmp_var (TREE_TYPE (to_ptr));
4337 gimple_call_set_lhs (gs, t);
4338 gimplify_seq_add_stmt (seq_p, gs);
4340 *expr_p = build_simple_mem_ref (t);
4341 return GS_ALL_DONE;
4344 gimplify_seq_add_stmt (seq_p, gs);
4345 *expr_p = NULL;
4346 return GS_ALL_DONE;
4349 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4350 a call to __builtin_memset. In this case we know that the RHS is
4351 a CONSTRUCTOR with an empty element list. */
4353 static enum gimplify_status
4354 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4355 gimple_seq *seq_p)
4357 tree t, from, to, to_ptr;
4358 gcall *gs;
4359 location_t loc = EXPR_LOCATION (*expr_p);
4361 /* Assert our assumptions, to abort instead of producing wrong code
4362 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4363 not be immediately exposed. */
4364 from = TREE_OPERAND (*expr_p, 1);
4365 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4366 from = TREE_OPERAND (from, 0);
4368 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4369 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4371 /* Now proceed. */
4372 to = TREE_OPERAND (*expr_p, 0);
4374 to_ptr = build_fold_addr_expr_loc (loc, to);
4375 gimplify_arg (&to_ptr, seq_p, loc);
4376 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4378 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4380 if (want_value)
4382 /* tmp = memset() */
4383 t = create_tmp_var (TREE_TYPE (to_ptr));
4384 gimple_call_set_lhs (gs, t);
4385 gimplify_seq_add_stmt (seq_p, gs);
4387 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4388 return GS_ALL_DONE;
4391 gimplify_seq_add_stmt (seq_p, gs);
4392 *expr_p = NULL;
4393 return GS_ALL_DONE;
4396 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4397 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4398 assignment. Return non-null if we detect a potential overlap. */
4400 struct gimplify_init_ctor_preeval_data
4402 /* The base decl of the lhs object. May be NULL, in which case we
4403 have to assume the lhs is indirect. */
4404 tree lhs_base_decl;
4406 /* The alias set of the lhs object. */
4407 alias_set_type lhs_alias_set;
4410 static tree
4411 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4413 struct gimplify_init_ctor_preeval_data *data
4414 = (struct gimplify_init_ctor_preeval_data *) xdata;
4415 tree t = *tp;
4417 /* If we find the base object, obviously we have overlap. */
4418 if (data->lhs_base_decl == t)
4419 return t;
4421 /* If the constructor component is indirect, determine if we have a
4422 potential overlap with the lhs. The only bits of information we
4423 have to go on at this point are addressability and alias sets. */
4424 if ((INDIRECT_REF_P (t)
4425 || TREE_CODE (t) == MEM_REF)
4426 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4427 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4428 return t;
4430 /* If the constructor component is a call, determine if it can hide a
4431 potential overlap with the lhs through an INDIRECT_REF like above.
4432 ??? Ugh - this is completely broken. In fact this whole analysis
4433 doesn't look conservative. */
4434 if (TREE_CODE (t) == CALL_EXPR)
4436 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4438 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4439 if (POINTER_TYPE_P (TREE_VALUE (type))
4440 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4441 && alias_sets_conflict_p (data->lhs_alias_set,
4442 get_alias_set
4443 (TREE_TYPE (TREE_VALUE (type)))))
4444 return t;
4447 if (IS_TYPE_OR_DECL_P (t))
4448 *walk_subtrees = 0;
4449 return NULL;
4452 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4453 force values that overlap with the lhs (as described by *DATA)
4454 into temporaries. */
4456 static void
4457 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4458 struct gimplify_init_ctor_preeval_data *data)
4460 enum gimplify_status one;
4462 /* If the value is constant, then there's nothing to pre-evaluate. */
4463 if (TREE_CONSTANT (*expr_p))
4465 /* Ensure it does not have side effects, it might contain a reference to
4466 the object we're initializing. */
4467 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4468 return;
4471 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4472 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4473 return;
4475 /* Recurse for nested constructors. */
4476 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4478 unsigned HOST_WIDE_INT ix;
4479 constructor_elt *ce;
4480 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4482 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4483 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4485 return;
4488 /* If this is a variable sized type, we must remember the size. */
4489 maybe_with_size_expr (expr_p);
4491 /* Gimplify the constructor element to something appropriate for the rhs
4492 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4493 the gimplifier will consider this a store to memory. Doing this
4494 gimplification now means that we won't have to deal with complicated
4495 language-specific trees, nor trees like SAVE_EXPR that can induce
4496 exponential search behavior. */
4497 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4498 if (one == GS_ERROR)
4500 *expr_p = NULL;
4501 return;
4504 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4505 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4506 always be true for all scalars, since is_gimple_mem_rhs insists on a
4507 temporary variable for them. */
4508 if (DECL_P (*expr_p))
4509 return;
4511 /* If this is of variable size, we have no choice but to assume it doesn't
4512 overlap since we can't make a temporary for it. */
4513 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4514 return;
4516 /* Otherwise, we must search for overlap ... */
4517 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4518 return;
4520 /* ... and if found, force the value into a temporary. */
4521 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4524 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4525 a RANGE_EXPR in a CONSTRUCTOR for an array.
4527 var = lower;
4528 loop_entry:
4529 object[var] = value;
4530 if (var == upper)
4531 goto loop_exit;
4532 var = var + 1;
4533 goto loop_entry;
4534 loop_exit:
4536 We increment var _after_ the loop exit check because we might otherwise
4537 fail if upper == TYPE_MAX_VALUE (type for upper).
4539 Note that we never have to deal with SAVE_EXPRs here, because this has
4540 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4542 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4543 gimple_seq *, bool);
4545 static void
4546 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4547 tree value, tree array_elt_type,
4548 gimple_seq *pre_p, bool cleared)
4550 tree loop_entry_label, loop_exit_label, fall_thru_label;
4551 tree var, var_type, cref, tmp;
4553 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4554 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4555 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4557 /* Create and initialize the index variable. */
4558 var_type = TREE_TYPE (upper);
4559 var = create_tmp_var (var_type);
4560 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4562 /* Add the loop entry label. */
4563 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4565 /* Build the reference. */
4566 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4567 var, NULL_TREE, NULL_TREE);
4569 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4570 the store. Otherwise just assign value to the reference. */
4572 if (TREE_CODE (value) == CONSTRUCTOR)
4573 /* NB we might have to call ourself recursively through
4574 gimplify_init_ctor_eval if the value is a constructor. */
4575 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4576 pre_p, cleared);
4577 else
4578 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4580 /* We exit the loop when the index var is equal to the upper bound. */
4581 gimplify_seq_add_stmt (pre_p,
4582 gimple_build_cond (EQ_EXPR, var, upper,
4583 loop_exit_label, fall_thru_label));
4585 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4587 /* Otherwise, increment the index var... */
4588 tmp = build2 (PLUS_EXPR, var_type, var,
4589 fold_convert (var_type, integer_one_node));
4590 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4592 /* ...and jump back to the loop entry. */
4593 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4595 /* Add the loop exit label. */
4596 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4599 /* Return true if FDECL is accessing a field that is zero sized. */
4601 static bool
4602 zero_sized_field_decl (const_tree fdecl)
4604 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4605 && integer_zerop (DECL_SIZE (fdecl)))
4606 return true;
4607 return false;
4610 /* Return true if TYPE is zero sized. */
4612 static bool
4613 zero_sized_type (const_tree type)
4615 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4616 && integer_zerop (TYPE_SIZE (type)))
4617 return true;
4618 return false;
4621 /* A subroutine of gimplify_init_constructor. Generate individual
4622 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4623 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4624 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4625 zeroed first. */
4627 static void
4628 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4629 gimple_seq *pre_p, bool cleared)
4631 tree array_elt_type = NULL;
4632 unsigned HOST_WIDE_INT ix;
4633 tree purpose, value;
4635 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4636 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4638 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4640 tree cref;
4642 /* NULL values are created above for gimplification errors. */
4643 if (value == NULL)
4644 continue;
4646 if (cleared && initializer_zerop (value))
4647 continue;
4649 /* ??? Here's to hoping the front end fills in all of the indices,
4650 so we don't have to figure out what's missing ourselves. */
4651 gcc_assert (purpose);
4653 /* Skip zero-sized fields, unless value has side-effects. This can
4654 happen with calls to functions returning a zero-sized type, which
4655 we shouldn't discard. As a number of downstream passes don't
4656 expect sets of zero-sized fields, we rely on the gimplification of
4657 the MODIFY_EXPR we make below to drop the assignment statement. */
4658 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4659 continue;
4661 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4662 whole range. */
4663 if (TREE_CODE (purpose) == RANGE_EXPR)
4665 tree lower = TREE_OPERAND (purpose, 0);
4666 tree upper = TREE_OPERAND (purpose, 1);
4668 /* If the lower bound is equal to upper, just treat it as if
4669 upper was the index. */
4670 if (simple_cst_equal (lower, upper))
4671 purpose = upper;
4672 else
4674 gimplify_init_ctor_eval_range (object, lower, upper, value,
4675 array_elt_type, pre_p, cleared);
4676 continue;
4680 if (array_elt_type)
4682 /* Do not use bitsizetype for ARRAY_REF indices. */
4683 if (TYPE_DOMAIN (TREE_TYPE (object)))
4684 purpose
4685 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4686 purpose);
4687 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4688 purpose, NULL_TREE, NULL_TREE);
4690 else
4692 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4693 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4694 unshare_expr (object), purpose, NULL_TREE);
4697 if (TREE_CODE (value) == CONSTRUCTOR
4698 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4699 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4700 pre_p, cleared);
4701 else
4703 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4704 gimplify_and_add (init, pre_p);
4705 ggc_free (init);
4710 /* Return the appropriate RHS predicate for this LHS. */
4712 gimple_predicate
4713 rhs_predicate_for (tree lhs)
4715 if (is_gimple_reg (lhs))
4716 return is_gimple_reg_rhs_or_call;
4717 else
4718 return is_gimple_mem_rhs_or_call;
4721 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4722 before the LHS has been gimplified. */
4724 static gimple_predicate
4725 initial_rhs_predicate_for (tree lhs)
4727 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4728 return is_gimple_reg_rhs_or_call;
4729 else
4730 return is_gimple_mem_rhs_or_call;
4733 /* Gimplify a C99 compound literal expression. This just means adding
4734 the DECL_EXPR before the current statement and using its anonymous
4735 decl instead. */
4737 static enum gimplify_status
4738 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4739 bool (*gimple_test_f) (tree),
4740 fallback_t fallback)
4742 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4743 tree decl = DECL_EXPR_DECL (decl_s);
4744 tree init = DECL_INITIAL (decl);
4745 /* Mark the decl as addressable if the compound literal
4746 expression is addressable now, otherwise it is marked too late
4747 after we gimplify the initialization expression. */
4748 if (TREE_ADDRESSABLE (*expr_p))
4749 TREE_ADDRESSABLE (decl) = 1;
4750 /* Otherwise, if we don't need an lvalue and have a literal directly
4751 substitute it. Check if it matches the gimple predicate, as
4752 otherwise we'd generate a new temporary, and we can as well just
4753 use the decl we already have. */
4754 else if (!TREE_ADDRESSABLE (decl)
4755 && !TREE_THIS_VOLATILE (decl)
4756 && init
4757 && (fallback & fb_lvalue) == 0
4758 && gimple_test_f (init))
4760 *expr_p = init;
4761 return GS_OK;
4764 /* If the decl is not addressable, then it is being used in some
4765 expression or on the right hand side of a statement, and it can
4766 be put into a readonly data section. */
4767 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4768 TREE_READONLY (decl) = 1;
4770 /* This decl isn't mentioned in the enclosing block, so add it to the
4771 list of temps. FIXME it seems a bit of a kludge to say that
4772 anonymous artificial vars aren't pushed, but everything else is. */
4773 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4774 gimple_add_tmp_var (decl);
4776 gimplify_and_add (decl_s, pre_p);
4777 *expr_p = decl;
4778 return GS_OK;
4781 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4782 return a new CONSTRUCTOR if something changed. */
4784 static tree
4785 optimize_compound_literals_in_ctor (tree orig_ctor)
4787 tree ctor = orig_ctor;
4788 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4789 unsigned int idx, num = vec_safe_length (elts);
4791 for (idx = 0; idx < num; idx++)
4793 tree value = (*elts)[idx].value;
4794 tree newval = value;
4795 if (TREE_CODE (value) == CONSTRUCTOR)
4796 newval = optimize_compound_literals_in_ctor (value);
4797 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4799 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4800 tree decl = DECL_EXPR_DECL (decl_s);
4801 tree init = DECL_INITIAL (decl);
4803 if (!TREE_ADDRESSABLE (value)
4804 && !TREE_ADDRESSABLE (decl)
4805 && init
4806 && TREE_CODE (init) == CONSTRUCTOR)
4807 newval = optimize_compound_literals_in_ctor (init);
4809 if (newval == value)
4810 continue;
4812 if (ctor == orig_ctor)
4814 ctor = copy_node (orig_ctor);
4815 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4816 elts = CONSTRUCTOR_ELTS (ctor);
4818 (*elts)[idx].value = newval;
4820 return ctor;
4823 /* A subroutine of gimplify_modify_expr. Break out elements of a
4824 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4826 Note that we still need to clear any elements that don't have explicit
4827 initializers, so if not all elements are initialized we keep the
4828 original MODIFY_EXPR, we just remove all of the constructor elements.
4830 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4831 GS_ERROR if we would have to create a temporary when gimplifying
4832 this constructor. Otherwise, return GS_OK.
4834 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4836 static enum gimplify_status
4837 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4838 bool want_value, bool notify_temp_creation)
4840 tree object, ctor, type;
4841 enum gimplify_status ret;
4842 vec<constructor_elt, va_gc> *elts;
4844 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4846 if (!notify_temp_creation)
4848 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4849 is_gimple_lvalue, fb_lvalue);
4850 if (ret == GS_ERROR)
4851 return ret;
4854 object = TREE_OPERAND (*expr_p, 0);
4855 ctor = TREE_OPERAND (*expr_p, 1)
4856 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4857 type = TREE_TYPE (ctor);
4858 elts = CONSTRUCTOR_ELTS (ctor);
4859 ret = GS_ALL_DONE;
4861 switch (TREE_CODE (type))
4863 case RECORD_TYPE:
4864 case UNION_TYPE:
4865 case QUAL_UNION_TYPE:
4866 case ARRAY_TYPE:
4868 /* Use readonly data for initializers of this or smaller size
4869 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4870 ratio. */
4871 const HOST_WIDE_INT min_unique_size = 64;
4872 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4873 is smaller than this, use readonly data. */
4874 const int unique_nonzero_ratio = 8;
4875 /* True if a single access of the object must be ensured. This is the
4876 case if the target is volatile, the type is non-addressable and more
4877 than one field need to be assigned. */
4878 const bool ensure_single_access
4879 = TREE_THIS_VOLATILE (object)
4880 && !TREE_ADDRESSABLE (type)
4881 && vec_safe_length (elts) > 1;
4882 struct gimplify_init_ctor_preeval_data preeval_data;
4883 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4884 HOST_WIDE_INT num_unique_nonzero_elements;
4885 bool cleared, complete_p, valid_const_initializer;
4887 /* Aggregate types must lower constructors to initialization of
4888 individual elements. The exception is that a CONSTRUCTOR node
4889 with no elements indicates zero-initialization of the whole. */
4890 if (vec_safe_is_empty (elts))
4892 if (notify_temp_creation)
4893 return GS_OK;
4894 break;
4897 /* Fetch information about the constructor to direct later processing.
4898 We might want to make static versions of it in various cases, and
4899 can only do so if it known to be a valid constant initializer. */
4900 valid_const_initializer
4901 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4902 &num_unique_nonzero_elements,
4903 &num_ctor_elements, &complete_p);
4905 /* If a const aggregate variable is being initialized, then it
4906 should never be a lose to promote the variable to be static. */
4907 if (valid_const_initializer
4908 && num_nonzero_elements > 1
4909 && TREE_READONLY (object)
4910 && VAR_P (object)
4911 && !DECL_REGISTER (object)
4912 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4913 /* For ctors that have many repeated nonzero elements
4914 represented through RANGE_EXPRs, prefer initializing
4915 those through runtime loops over copies of large amounts
4916 of data from readonly data section. */
4917 && (num_unique_nonzero_elements
4918 > num_nonzero_elements / unique_nonzero_ratio
4919 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4920 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4922 if (notify_temp_creation)
4923 return GS_ERROR;
4925 DECL_INITIAL (object) = ctor;
4926 TREE_STATIC (object) = 1;
4927 if (!DECL_NAME (object))
4928 DECL_NAME (object) = create_tmp_var_name ("C");
4929 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4931 /* ??? C++ doesn't automatically append a .<number> to the
4932 assembler name, and even when it does, it looks at FE private
4933 data structures to figure out what that number should be,
4934 which are not set for this variable. I suppose this is
4935 important for local statics for inline functions, which aren't
4936 "local" in the object file sense. So in order to get a unique
4937 TU-local symbol, we must invoke the lhd version now. */
4938 lhd_set_decl_assembler_name (object);
4940 *expr_p = NULL_TREE;
4941 break;
4944 /* If there are "lots" of initialized elements, even discounting
4945 those that are not address constants (and thus *must* be
4946 computed at runtime), then partition the constructor into
4947 constant and non-constant parts. Block copy the constant
4948 parts in, then generate code for the non-constant parts. */
4949 /* TODO. There's code in cp/typeck.c to do this. */
4951 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4952 /* store_constructor will ignore the clearing of variable-sized
4953 objects. Initializers for such objects must explicitly set
4954 every field that needs to be set. */
4955 cleared = false;
4956 else if (!complete_p)
4957 /* If the constructor isn't complete, clear the whole object
4958 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4960 ??? This ought not to be needed. For any element not present
4961 in the initializer, we should simply set them to zero. Except
4962 we'd need to *find* the elements that are not present, and that
4963 requires trickery to avoid quadratic compile-time behavior in
4964 large cases or excessive memory use in small cases. */
4965 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4966 else if (num_ctor_elements - num_nonzero_elements
4967 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4968 && num_nonzero_elements < num_ctor_elements / 4)
4969 /* If there are "lots" of zeros, it's more efficient to clear
4970 the memory and then set the nonzero elements. */
4971 cleared = true;
4972 else if (ensure_single_access && num_nonzero_elements == 0)
4973 /* If a single access to the target must be ensured and all elements
4974 are zero, then it's optimal to clear whatever their number. */
4975 cleared = true;
4976 else
4977 cleared = false;
4979 /* If there are "lots" of initialized elements, and all of them
4980 are valid address constants, then the entire initializer can
4981 be dropped to memory, and then memcpy'd out. Don't do this
4982 for sparse arrays, though, as it's more efficient to follow
4983 the standard CONSTRUCTOR behavior of memset followed by
4984 individual element initialization. Also don't do this for small
4985 all-zero initializers (which aren't big enough to merit
4986 clearing), and don't try to make bitwise copies of
4987 TREE_ADDRESSABLE types. */
4988 if (valid_const_initializer
4989 && complete_p
4990 && !(cleared || num_nonzero_elements == 0)
4991 && !TREE_ADDRESSABLE (type))
4993 HOST_WIDE_INT size = int_size_in_bytes (type);
4994 unsigned int align;
4996 /* ??? We can still get unbounded array types, at least
4997 from the C++ front end. This seems wrong, but attempt
4998 to work around it for now. */
4999 if (size < 0)
5001 size = int_size_in_bytes (TREE_TYPE (object));
5002 if (size >= 0)
5003 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5006 /* Find the maximum alignment we can assume for the object. */
5007 /* ??? Make use of DECL_OFFSET_ALIGN. */
5008 if (DECL_P (object))
5009 align = DECL_ALIGN (object);
5010 else
5011 align = TYPE_ALIGN (type);
5013 /* Do a block move either if the size is so small as to make
5014 each individual move a sub-unit move on average, or if it
5015 is so large as to make individual moves inefficient. */
5016 if (size > 0
5017 && num_nonzero_elements > 1
5018 /* For ctors that have many repeated nonzero elements
5019 represented through RANGE_EXPRs, prefer initializing
5020 those through runtime loops over copies of large amounts
5021 of data from readonly data section. */
5022 && (num_unique_nonzero_elements
5023 > num_nonzero_elements / unique_nonzero_ratio
5024 || size <= min_unique_size)
5025 && (size < num_nonzero_elements
5026 || !can_move_by_pieces (size, align)))
5028 if (notify_temp_creation)
5029 return GS_ERROR;
5031 walk_tree (&ctor, force_labels_r, NULL, NULL);
5032 ctor = tree_output_constant_def (ctor);
5033 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5034 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5035 TREE_OPERAND (*expr_p, 1) = ctor;
5037 /* This is no longer an assignment of a CONSTRUCTOR, but
5038 we still may have processing to do on the LHS. So
5039 pretend we didn't do anything here to let that happen. */
5040 return GS_UNHANDLED;
5044 /* If a single access to the target must be ensured and there are
5045 nonzero elements or the zero elements are not assigned en masse,
5046 initialize the target from a temporary. */
5047 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5049 if (notify_temp_creation)
5050 return GS_ERROR;
5052 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5053 TREE_OPERAND (*expr_p, 0) = temp;
5054 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5055 *expr_p,
5056 build2 (MODIFY_EXPR, void_type_node,
5057 object, temp));
5058 return GS_OK;
5061 if (notify_temp_creation)
5062 return GS_OK;
5064 /* If there are nonzero elements and if needed, pre-evaluate to capture
5065 elements overlapping with the lhs into temporaries. We must do this
5066 before clearing to fetch the values before they are zeroed-out. */
5067 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5069 preeval_data.lhs_base_decl = get_base_address (object);
5070 if (!DECL_P (preeval_data.lhs_base_decl))
5071 preeval_data.lhs_base_decl = NULL;
5072 preeval_data.lhs_alias_set = get_alias_set (object);
5074 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5075 pre_p, post_p, &preeval_data);
5078 bool ctor_has_side_effects_p
5079 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5081 if (cleared)
5083 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5084 Note that we still have to gimplify, in order to handle the
5085 case of variable sized types. Avoid shared tree structures. */
5086 CONSTRUCTOR_ELTS (ctor) = NULL;
5087 TREE_SIDE_EFFECTS (ctor) = 0;
5088 object = unshare_expr (object);
5089 gimplify_stmt (expr_p, pre_p);
5092 /* If we have not block cleared the object, or if there are nonzero
5093 elements in the constructor, or if the constructor has side effects,
5094 add assignments to the individual scalar fields of the object. */
5095 if (!cleared
5096 || num_nonzero_elements > 0
5097 || ctor_has_side_effects_p)
5098 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5100 *expr_p = NULL_TREE;
5102 break;
5104 case COMPLEX_TYPE:
5106 tree r, i;
5108 if (notify_temp_creation)
5109 return GS_OK;
5111 /* Extract the real and imaginary parts out of the ctor. */
5112 gcc_assert (elts->length () == 2);
5113 r = (*elts)[0].value;
5114 i = (*elts)[1].value;
5115 if (r == NULL || i == NULL)
5117 tree zero = build_zero_cst (TREE_TYPE (type));
5118 if (r == NULL)
5119 r = zero;
5120 if (i == NULL)
5121 i = zero;
5124 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5125 represent creation of a complex value. */
5126 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5128 ctor = build_complex (type, r, i);
5129 TREE_OPERAND (*expr_p, 1) = ctor;
5131 else
5133 ctor = build2 (COMPLEX_EXPR, type, r, i);
5134 TREE_OPERAND (*expr_p, 1) = ctor;
5135 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5136 pre_p,
5137 post_p,
5138 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5139 fb_rvalue);
5142 break;
5144 case VECTOR_TYPE:
5146 unsigned HOST_WIDE_INT ix;
5147 constructor_elt *ce;
5149 if (notify_temp_creation)
5150 return GS_OK;
5152 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5153 if (TREE_CONSTANT (ctor))
5155 bool constant_p = true;
5156 tree value;
5158 /* Even when ctor is constant, it might contain non-*_CST
5159 elements, such as addresses or trapping values like
5160 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5161 in VECTOR_CST nodes. */
5162 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5163 if (!CONSTANT_CLASS_P (value))
5165 constant_p = false;
5166 break;
5169 if (constant_p)
5171 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5172 break;
5175 TREE_CONSTANT (ctor) = 0;
5178 /* Vector types use CONSTRUCTOR all the way through gimple
5179 compilation as a general initializer. */
5180 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5182 enum gimplify_status tret;
5183 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5184 fb_rvalue);
5185 if (tret == GS_ERROR)
5186 ret = GS_ERROR;
5187 else if (TREE_STATIC (ctor)
5188 && !initializer_constant_valid_p (ce->value,
5189 TREE_TYPE (ce->value)))
5190 TREE_STATIC (ctor) = 0;
5192 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5193 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5195 break;
5197 default:
5198 /* So how did we get a CONSTRUCTOR for a scalar type? */
5199 gcc_unreachable ();
5202 if (ret == GS_ERROR)
5203 return GS_ERROR;
5204 /* If we have gimplified both sides of the initializer but have
5205 not emitted an assignment, do so now. */
5206 if (*expr_p)
5208 tree lhs = TREE_OPERAND (*expr_p, 0);
5209 tree rhs = TREE_OPERAND (*expr_p, 1);
5210 if (want_value && object == lhs)
5211 lhs = unshare_expr (lhs);
5212 gassign *init = gimple_build_assign (lhs, rhs);
5213 gimplify_seq_add_stmt (pre_p, init);
5215 if (want_value)
5217 *expr_p = object;
5218 return GS_OK;
5220 else
5222 *expr_p = NULL;
5223 return GS_ALL_DONE;
5227 /* Given a pointer value OP0, return a simplified version of an
5228 indirection through OP0, or NULL_TREE if no simplification is
5229 possible. This may only be applied to a rhs of an expression.
5230 Note that the resulting type may be different from the type pointed
5231 to in the sense that it is still compatible from the langhooks
5232 point of view. */
5234 static tree
5235 gimple_fold_indirect_ref_rhs (tree t)
5237 return gimple_fold_indirect_ref (t);
5240 /* Subroutine of gimplify_modify_expr to do simplifications of
5241 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5242 something changes. */
5244 static enum gimplify_status
5245 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5246 gimple_seq *pre_p, gimple_seq *post_p,
5247 bool want_value)
5249 enum gimplify_status ret = GS_UNHANDLED;
5250 bool changed;
5254 changed = false;
5255 switch (TREE_CODE (*from_p))
5257 case VAR_DECL:
5258 /* If we're assigning from a read-only variable initialized with
5259 a constructor and not volatile, do the direct assignment from
5260 the constructor, but only if the target is not volatile either
5261 since this latter assignment might end up being done on a per
5262 field basis. However, if the target is volatile and the type
5263 is aggregate and non-addressable, gimplify_init_constructor
5264 knows that it needs to ensure a single access to the target
5265 and it will return GS_OK only in this case. */
5266 if (TREE_READONLY (*from_p)
5267 && DECL_INITIAL (*from_p)
5268 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5269 && !TREE_THIS_VOLATILE (*from_p)
5270 && (!TREE_THIS_VOLATILE (*to_p)
5271 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5272 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5274 tree old_from = *from_p;
5275 enum gimplify_status subret;
5277 /* Move the constructor into the RHS. */
5278 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5280 /* Let's see if gimplify_init_constructor will need to put
5281 it in memory. */
5282 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5283 false, true);
5284 if (subret == GS_ERROR)
5286 /* If so, revert the change. */
5287 *from_p = old_from;
5289 else
5291 ret = GS_OK;
5292 changed = true;
5295 break;
5296 case INDIRECT_REF:
5298 /* If we have code like
5300 *(const A*)(A*)&x
5302 where the type of "x" is a (possibly cv-qualified variant
5303 of "A"), treat the entire expression as identical to "x".
5304 This kind of code arises in C++ when an object is bound
5305 to a const reference, and if "x" is a TARGET_EXPR we want
5306 to take advantage of the optimization below. */
5307 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5308 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5309 if (t)
5311 if (TREE_THIS_VOLATILE (t) != volatile_p)
5313 if (DECL_P (t))
5314 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5315 build_fold_addr_expr (t));
5316 if (REFERENCE_CLASS_P (t))
5317 TREE_THIS_VOLATILE (t) = volatile_p;
5319 *from_p = t;
5320 ret = GS_OK;
5321 changed = true;
5323 break;
5326 case TARGET_EXPR:
5328 /* If we are initializing something from a TARGET_EXPR, strip the
5329 TARGET_EXPR and initialize it directly, if possible. This can't
5330 be done if the initializer is void, since that implies that the
5331 temporary is set in some non-trivial way.
5333 ??? What about code that pulls out the temp and uses it
5334 elsewhere? I think that such code never uses the TARGET_EXPR as
5335 an initializer. If I'm wrong, we'll die because the temp won't
5336 have any RTL. In that case, I guess we'll need to replace
5337 references somehow. */
5338 tree init = TARGET_EXPR_INITIAL (*from_p);
5340 if (init
5341 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5342 || !TARGET_EXPR_NO_ELIDE (*from_p))
5343 && !VOID_TYPE_P (TREE_TYPE (init)))
5345 *from_p = init;
5346 ret = GS_OK;
5347 changed = true;
5350 break;
5352 case COMPOUND_EXPR:
5353 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5354 caught. */
5355 gimplify_compound_expr (from_p, pre_p, true);
5356 ret = GS_OK;
5357 changed = true;
5358 break;
5360 case CONSTRUCTOR:
5361 /* If we already made some changes, let the front end have a
5362 crack at this before we break it down. */
5363 if (ret != GS_UNHANDLED)
5364 break;
5365 /* If we're initializing from a CONSTRUCTOR, break this into
5366 individual MODIFY_EXPRs. */
5367 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5368 false);
5370 case COND_EXPR:
5371 /* If we're assigning to a non-register type, push the assignment
5372 down into the branches. This is mandatory for ADDRESSABLE types,
5373 since we cannot generate temporaries for such, but it saves a
5374 copy in other cases as well. */
5375 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5377 /* This code should mirror the code in gimplify_cond_expr. */
5378 enum tree_code code = TREE_CODE (*expr_p);
5379 tree cond = *from_p;
5380 tree result = *to_p;
5382 ret = gimplify_expr (&result, pre_p, post_p,
5383 is_gimple_lvalue, fb_lvalue);
5384 if (ret != GS_ERROR)
5385 ret = GS_OK;
5387 /* If we are going to write RESULT more than once, clear
5388 TREE_READONLY flag, otherwise we might incorrectly promote
5389 the variable to static const and initialize it at compile
5390 time in one of the branches. */
5391 if (VAR_P (result)
5392 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5393 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5394 TREE_READONLY (result) = 0;
5395 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5396 TREE_OPERAND (cond, 1)
5397 = build2 (code, void_type_node, result,
5398 TREE_OPERAND (cond, 1));
5399 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5400 TREE_OPERAND (cond, 2)
5401 = build2 (code, void_type_node, unshare_expr (result),
5402 TREE_OPERAND (cond, 2));
5404 TREE_TYPE (cond) = void_type_node;
5405 recalculate_side_effects (cond);
5407 if (want_value)
5409 gimplify_and_add (cond, pre_p);
5410 *expr_p = unshare_expr (result);
5412 else
5413 *expr_p = cond;
5414 return ret;
5416 break;
5418 case CALL_EXPR:
5419 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5420 return slot so that we don't generate a temporary. */
5421 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5422 && aggregate_value_p (*from_p, *from_p))
5424 bool use_target;
5426 if (!(rhs_predicate_for (*to_p))(*from_p))
5427 /* If we need a temporary, *to_p isn't accurate. */
5428 use_target = false;
5429 /* It's OK to use the return slot directly unless it's an NRV. */
5430 else if (TREE_CODE (*to_p) == RESULT_DECL
5431 && DECL_NAME (*to_p) == NULL_TREE
5432 && needs_to_live_in_memory (*to_p))
5433 use_target = true;
5434 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5435 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5436 /* Don't force regs into memory. */
5437 use_target = false;
5438 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5439 /* It's OK to use the target directly if it's being
5440 initialized. */
5441 use_target = true;
5442 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5443 != INTEGER_CST)
5444 /* Always use the target and thus RSO for variable-sized types.
5445 GIMPLE cannot deal with a variable-sized assignment
5446 embedded in a call statement. */
5447 use_target = true;
5448 else if (TREE_CODE (*to_p) != SSA_NAME
5449 && (!is_gimple_variable (*to_p)
5450 || needs_to_live_in_memory (*to_p)))
5451 /* Don't use the original target if it's already addressable;
5452 if its address escapes, and the called function uses the
5453 NRV optimization, a conforming program could see *to_p
5454 change before the called function returns; see c++/19317.
5455 When optimizing, the return_slot pass marks more functions
5456 as safe after we have escape info. */
5457 use_target = false;
5458 else
5459 use_target = true;
5461 if (use_target)
5463 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5464 mark_addressable (*to_p);
5467 break;
5469 case WITH_SIZE_EXPR:
5470 /* Likewise for calls that return an aggregate of non-constant size,
5471 since we would not be able to generate a temporary at all. */
5472 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5474 *from_p = TREE_OPERAND (*from_p, 0);
5475 /* We don't change ret in this case because the
5476 WITH_SIZE_EXPR might have been added in
5477 gimplify_modify_expr, so returning GS_OK would lead to an
5478 infinite loop. */
5479 changed = true;
5481 break;
5483 /* If we're initializing from a container, push the initialization
5484 inside it. */
5485 case CLEANUP_POINT_EXPR:
5486 case BIND_EXPR:
5487 case STATEMENT_LIST:
5489 tree wrap = *from_p;
5490 tree t;
5492 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5493 fb_lvalue);
5494 if (ret != GS_ERROR)
5495 ret = GS_OK;
5497 t = voidify_wrapper_expr (wrap, *expr_p);
5498 gcc_assert (t == *expr_p);
5500 if (want_value)
5502 gimplify_and_add (wrap, pre_p);
5503 *expr_p = unshare_expr (*to_p);
5505 else
5506 *expr_p = wrap;
5507 return GS_OK;
5510 case COMPOUND_LITERAL_EXPR:
5512 tree complit = TREE_OPERAND (*expr_p, 1);
5513 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5514 tree decl = DECL_EXPR_DECL (decl_s);
5515 tree init = DECL_INITIAL (decl);
5517 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5518 into struct T x = { 0, 1, 2 } if the address of the
5519 compound literal has never been taken. */
5520 if (!TREE_ADDRESSABLE (complit)
5521 && !TREE_ADDRESSABLE (decl)
5522 && init)
5524 *expr_p = copy_node (*expr_p);
5525 TREE_OPERAND (*expr_p, 1) = init;
5526 return GS_OK;
5530 default:
5531 break;
5534 while (changed);
5536 return ret;
5540 /* Return true if T looks like a valid GIMPLE statement. */
5542 static bool
5543 is_gimple_stmt (tree t)
5545 const enum tree_code code = TREE_CODE (t);
5547 switch (code)
5549 case NOP_EXPR:
5550 /* The only valid NOP_EXPR is the empty statement. */
5551 return IS_EMPTY_STMT (t);
5553 case BIND_EXPR:
5554 case COND_EXPR:
5555 /* These are only valid if they're void. */
5556 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5558 case SWITCH_EXPR:
5559 case GOTO_EXPR:
5560 case RETURN_EXPR:
5561 case LABEL_EXPR:
5562 case CASE_LABEL_EXPR:
5563 case TRY_CATCH_EXPR:
5564 case TRY_FINALLY_EXPR:
5565 case EH_FILTER_EXPR:
5566 case CATCH_EXPR:
5567 case ASM_EXPR:
5568 case STATEMENT_LIST:
5569 case OACC_PARALLEL:
5570 case OACC_KERNELS:
5571 case OACC_SERIAL:
5572 case OACC_DATA:
5573 case OACC_HOST_DATA:
5574 case OACC_DECLARE:
5575 case OACC_UPDATE:
5576 case OACC_ENTER_DATA:
5577 case OACC_EXIT_DATA:
5578 case OACC_CACHE:
5579 case OMP_PARALLEL:
5580 case OMP_FOR:
5581 case OMP_SIMD:
5582 case OMP_DISTRIBUTE:
5583 case OMP_LOOP:
5584 case OACC_LOOP:
5585 case OMP_SCAN:
5586 case OMP_SECTIONS:
5587 case OMP_SECTION:
5588 case OMP_SINGLE:
5589 case OMP_MASTER:
5590 case OMP_TASKGROUP:
5591 case OMP_ORDERED:
5592 case OMP_CRITICAL:
5593 case OMP_TASK:
5594 case OMP_TARGET:
5595 case OMP_TARGET_DATA:
5596 case OMP_TARGET_UPDATE:
5597 case OMP_TARGET_ENTER_DATA:
5598 case OMP_TARGET_EXIT_DATA:
5599 case OMP_TASKLOOP:
5600 case OMP_TEAMS:
5601 /* These are always void. */
5602 return true;
5604 case CALL_EXPR:
5605 case MODIFY_EXPR:
5606 case PREDICT_EXPR:
5607 /* These are valid regardless of their type. */
5608 return true;
5610 default:
5611 return false;
5616 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5617 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5619 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5620 other, unmodified part of the complex object just before the total store.
5621 As a consequence, if the object is still uninitialized, an undefined value
5622 will be loaded into a register, which may result in a spurious exception
5623 if the register is floating-point and the value happens to be a signaling
5624 NaN for example. Then the fully-fledged complex operations lowering pass
5625 followed by a DCE pass are necessary in order to fix things up. */
5627 static enum gimplify_status
5628 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5629 bool want_value)
5631 enum tree_code code, ocode;
5632 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5634 lhs = TREE_OPERAND (*expr_p, 0);
5635 rhs = TREE_OPERAND (*expr_p, 1);
5636 code = TREE_CODE (lhs);
5637 lhs = TREE_OPERAND (lhs, 0);
5639 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5640 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5641 TREE_NO_WARNING (other) = 1;
5642 other = get_formal_tmp_var (other, pre_p);
5644 realpart = code == REALPART_EXPR ? rhs : other;
5645 imagpart = code == REALPART_EXPR ? other : rhs;
5647 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5648 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5649 else
5650 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5652 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5653 *expr_p = (want_value) ? rhs : NULL_TREE;
5655 return GS_ALL_DONE;
5658 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5660 modify_expr
5661 : varname '=' rhs
5662 | '*' ID '=' rhs
5664 PRE_P points to the list where side effects that must happen before
5665 *EXPR_P should be stored.
5667 POST_P points to the list where side effects that must happen after
5668 *EXPR_P should be stored.
5670 WANT_VALUE is nonzero iff we want to use the value of this expression
5671 in another expression. */
5673 static enum gimplify_status
5674 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5675 bool want_value)
5677 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5678 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5679 enum gimplify_status ret = GS_UNHANDLED;
5680 gimple *assign;
5681 location_t loc = EXPR_LOCATION (*expr_p);
5682 gimple_stmt_iterator gsi;
5684 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5685 || TREE_CODE (*expr_p) == INIT_EXPR);
5687 /* Trying to simplify a clobber using normal logic doesn't work,
5688 so handle it here. */
5689 if (TREE_CLOBBER_P (*from_p))
5691 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5692 if (ret == GS_ERROR)
5693 return ret;
5694 gcc_assert (!want_value);
5695 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5697 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5698 pre_p, post_p);
5699 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5701 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5702 *expr_p = NULL;
5703 return GS_ALL_DONE;
5706 /* Insert pointer conversions required by the middle-end that are not
5707 required by the frontend. This fixes middle-end type checking for
5708 for example gcc.dg/redecl-6.c. */
5709 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5711 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5712 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5713 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5716 /* See if any simplifications can be done based on what the RHS is. */
5717 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5718 want_value);
5719 if (ret != GS_UNHANDLED)
5720 return ret;
5722 /* For zero sized types only gimplify the left hand side and right hand
5723 side as statements and throw away the assignment. Do this after
5724 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5725 types properly. */
5726 if (zero_sized_type (TREE_TYPE (*from_p))
5727 && !want_value
5728 /* Don't do this for calls that return addressable types, expand_call
5729 relies on those having a lhs. */
5730 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5731 && TREE_CODE (*from_p) == CALL_EXPR))
5733 gimplify_stmt (from_p, pre_p);
5734 gimplify_stmt (to_p, pre_p);
5735 *expr_p = NULL_TREE;
5736 return GS_ALL_DONE;
5739 /* If the value being copied is of variable width, compute the length
5740 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5741 before gimplifying any of the operands so that we can resolve any
5742 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5743 the size of the expression to be copied, not of the destination, so
5744 that is what we must do here. */
5745 maybe_with_size_expr (from_p);
5747 /* As a special case, we have to temporarily allow for assignments
5748 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5749 a toplevel statement, when gimplifying the GENERIC expression
5750 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5751 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5753 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5754 prevent gimplify_expr from trying to create a new temporary for
5755 foo's LHS, we tell it that it should only gimplify until it
5756 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5757 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5758 and all we need to do here is set 'a' to be its LHS. */
5760 /* Gimplify the RHS first for C++17 and bug 71104. */
5761 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5762 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5763 if (ret == GS_ERROR)
5764 return ret;
5766 /* Then gimplify the LHS. */
5767 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5768 twice we have to make sure to gimplify into non-SSA as otherwise
5769 the abnormal edge added later will make those defs not dominate
5770 their uses.
5771 ??? Technically this applies only to the registers used in the
5772 resulting non-register *TO_P. */
5773 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5774 if (saved_into_ssa
5775 && TREE_CODE (*from_p) == CALL_EXPR
5776 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5777 gimplify_ctxp->into_ssa = false;
5778 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5779 gimplify_ctxp->into_ssa = saved_into_ssa;
5780 if (ret == GS_ERROR)
5781 return ret;
5783 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5784 guess for the predicate was wrong. */
5785 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5786 if (final_pred != initial_pred)
5788 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5789 if (ret == GS_ERROR)
5790 return ret;
5793 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5794 size as argument to the call. */
5795 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5797 tree call = TREE_OPERAND (*from_p, 0);
5798 tree vlasize = TREE_OPERAND (*from_p, 1);
5800 if (TREE_CODE (call) == CALL_EXPR
5801 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5803 int nargs = call_expr_nargs (call);
5804 tree type = TREE_TYPE (call);
5805 tree ap = CALL_EXPR_ARG (call, 0);
5806 tree tag = CALL_EXPR_ARG (call, 1);
5807 tree aptag = CALL_EXPR_ARG (call, 2);
5808 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5809 IFN_VA_ARG, type,
5810 nargs + 1, ap, tag,
5811 aptag, vlasize);
5812 TREE_OPERAND (*from_p, 0) = newcall;
5816 /* Now see if the above changed *from_p to something we handle specially. */
5817 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5818 want_value);
5819 if (ret != GS_UNHANDLED)
5820 return ret;
5822 /* If we've got a variable sized assignment between two lvalues (i.e. does
5823 not involve a call), then we can make things a bit more straightforward
5824 by converting the assignment to memcpy or memset. */
5825 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5827 tree from = TREE_OPERAND (*from_p, 0);
5828 tree size = TREE_OPERAND (*from_p, 1);
5830 if (TREE_CODE (from) == CONSTRUCTOR)
5831 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5833 if (is_gimple_addressable (from))
5835 *from_p = from;
5836 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5837 pre_p);
5841 /* Transform partial stores to non-addressable complex variables into
5842 total stores. This allows us to use real instead of virtual operands
5843 for these variables, which improves optimization. */
5844 if ((TREE_CODE (*to_p) == REALPART_EXPR
5845 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5846 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5847 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5849 /* Try to alleviate the effects of the gimplification creating artificial
5850 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5851 make sure not to create DECL_DEBUG_EXPR links across functions. */
5852 if (!gimplify_ctxp->into_ssa
5853 && VAR_P (*from_p)
5854 && DECL_IGNORED_P (*from_p)
5855 && DECL_P (*to_p)
5856 && !DECL_IGNORED_P (*to_p)
5857 && decl_function_context (*to_p) == current_function_decl
5858 && decl_function_context (*from_p) == current_function_decl)
5860 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5861 DECL_NAME (*from_p)
5862 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5863 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5864 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5867 if (want_value && TREE_THIS_VOLATILE (*to_p))
5868 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5870 if (TREE_CODE (*from_p) == CALL_EXPR)
5872 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5873 instead of a GIMPLE_ASSIGN. */
5874 gcall *call_stmt;
5875 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5877 /* Gimplify internal functions created in the FEs. */
5878 int nargs = call_expr_nargs (*from_p), i;
5879 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5880 auto_vec<tree> vargs (nargs);
5882 for (i = 0; i < nargs; i++)
5884 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5885 EXPR_LOCATION (*from_p));
5886 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5888 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5889 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5890 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5892 else
5894 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5895 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5896 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5897 tree fndecl = get_callee_fndecl (*from_p);
5898 if (fndecl
5899 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5900 && call_expr_nargs (*from_p) == 3)
5901 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5902 CALL_EXPR_ARG (*from_p, 0),
5903 CALL_EXPR_ARG (*from_p, 1),
5904 CALL_EXPR_ARG (*from_p, 2));
5905 else
5907 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5910 notice_special_calls (call_stmt);
5911 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5912 gimple_call_set_lhs (call_stmt, *to_p);
5913 else if (TREE_CODE (*to_p) == SSA_NAME)
5914 /* The above is somewhat premature, avoid ICEing later for a
5915 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5916 ??? This doesn't make it a default-def. */
5917 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5919 assign = call_stmt;
5921 else
5923 assign = gimple_build_assign (*to_p, *from_p);
5924 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5925 if (COMPARISON_CLASS_P (*from_p))
5926 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5929 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5931 /* We should have got an SSA name from the start. */
5932 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5933 || ! gimple_in_ssa_p (cfun));
5936 gimplify_seq_add_stmt (pre_p, assign);
5937 gsi = gsi_last (*pre_p);
5938 maybe_fold_stmt (&gsi);
5940 if (want_value)
5942 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5943 return GS_OK;
5945 else
5946 *expr_p = NULL;
5948 return GS_ALL_DONE;
5951 /* Gimplify a comparison between two variable-sized objects. Do this
5952 with a call to BUILT_IN_MEMCMP. */
5954 static enum gimplify_status
5955 gimplify_variable_sized_compare (tree *expr_p)
5957 location_t loc = EXPR_LOCATION (*expr_p);
5958 tree op0 = TREE_OPERAND (*expr_p, 0);
5959 tree op1 = TREE_OPERAND (*expr_p, 1);
5960 tree t, arg, dest, src, expr;
5962 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5963 arg = unshare_expr (arg);
5964 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5965 src = build_fold_addr_expr_loc (loc, op1);
5966 dest = build_fold_addr_expr_loc (loc, op0);
5967 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5968 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5970 expr
5971 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5972 SET_EXPR_LOCATION (expr, loc);
5973 *expr_p = expr;
5975 return GS_OK;
5978 /* Gimplify a comparison between two aggregate objects of integral scalar
5979 mode as a comparison between the bitwise equivalent scalar values. */
5981 static enum gimplify_status
5982 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5984 location_t loc = EXPR_LOCATION (*expr_p);
5985 tree op0 = TREE_OPERAND (*expr_p, 0);
5986 tree op1 = TREE_OPERAND (*expr_p, 1);
5988 tree type = TREE_TYPE (op0);
5989 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5991 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5992 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5994 *expr_p
5995 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5997 return GS_OK;
6000 /* Gimplify an expression sequence. This function gimplifies each
6001 expression and rewrites the original expression with the last
6002 expression of the sequence in GIMPLE form.
6004 PRE_P points to the list where the side effects for all the
6005 expressions in the sequence will be emitted.
6007 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6009 static enum gimplify_status
6010 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6012 tree t = *expr_p;
6016 tree *sub_p = &TREE_OPERAND (t, 0);
6018 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6019 gimplify_compound_expr (sub_p, pre_p, false);
6020 else
6021 gimplify_stmt (sub_p, pre_p);
6023 t = TREE_OPERAND (t, 1);
6025 while (TREE_CODE (t) == COMPOUND_EXPR);
6027 *expr_p = t;
6028 if (want_value)
6029 return GS_OK;
6030 else
6032 gimplify_stmt (expr_p, pre_p);
6033 return GS_ALL_DONE;
6037 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6038 gimplify. After gimplification, EXPR_P will point to a new temporary
6039 that holds the original value of the SAVE_EXPR node.
6041 PRE_P points to the list where side effects that must happen before
6042 *EXPR_P should be stored. */
6044 static enum gimplify_status
6045 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6047 enum gimplify_status ret = GS_ALL_DONE;
6048 tree val;
6050 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6051 val = TREE_OPERAND (*expr_p, 0);
6053 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6054 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6056 /* The operand may be a void-valued expression. It is
6057 being executed only for its side-effects. */
6058 if (TREE_TYPE (val) == void_type_node)
6060 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6061 is_gimple_stmt, fb_none);
6062 val = NULL;
6064 else
6065 /* The temporary may not be an SSA name as later abnormal and EH
6066 control flow may invalidate use/def domination. When in SSA
6067 form then assume there are no such issues and SAVE_EXPRs only
6068 appear via GENERIC foldings. */
6069 val = get_initialized_tmp_var (val, pre_p, post_p,
6070 gimple_in_ssa_p (cfun));
6072 TREE_OPERAND (*expr_p, 0) = val;
6073 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6076 *expr_p = val;
6078 return ret;
6081 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6083 unary_expr
6084 : ...
6085 | '&' varname
6088 PRE_P points to the list where side effects that must happen before
6089 *EXPR_P should be stored.
6091 POST_P points to the list where side effects that must happen after
6092 *EXPR_P should be stored. */
6094 static enum gimplify_status
6095 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6097 tree expr = *expr_p;
6098 tree op0 = TREE_OPERAND (expr, 0);
6099 enum gimplify_status ret;
6100 location_t loc = EXPR_LOCATION (*expr_p);
6102 switch (TREE_CODE (op0))
6104 case INDIRECT_REF:
6105 do_indirect_ref:
6106 /* Check if we are dealing with an expression of the form '&*ptr'.
6107 While the front end folds away '&*ptr' into 'ptr', these
6108 expressions may be generated internally by the compiler (e.g.,
6109 builtins like __builtin_va_end). */
6110 /* Caution: the silent array decomposition semantics we allow for
6111 ADDR_EXPR means we can't always discard the pair. */
6112 /* Gimplification of the ADDR_EXPR operand may drop
6113 cv-qualification conversions, so make sure we add them if
6114 needed. */
6116 tree op00 = TREE_OPERAND (op0, 0);
6117 tree t_expr = TREE_TYPE (expr);
6118 tree t_op00 = TREE_TYPE (op00);
6120 if (!useless_type_conversion_p (t_expr, t_op00))
6121 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6122 *expr_p = op00;
6123 ret = GS_OK;
6125 break;
6127 case VIEW_CONVERT_EXPR:
6128 /* Take the address of our operand and then convert it to the type of
6129 this ADDR_EXPR.
6131 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6132 all clear. The impact of this transformation is even less clear. */
6134 /* If the operand is a useless conversion, look through it. Doing so
6135 guarantees that the ADDR_EXPR and its operand will remain of the
6136 same type. */
6137 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6138 op0 = TREE_OPERAND (op0, 0);
6140 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6141 build_fold_addr_expr_loc (loc,
6142 TREE_OPERAND (op0, 0)));
6143 ret = GS_OK;
6144 break;
6146 case MEM_REF:
6147 if (integer_zerop (TREE_OPERAND (op0, 1)))
6148 goto do_indirect_ref;
6150 /* fall through */
6152 default:
6153 /* If we see a call to a declared builtin or see its address
6154 being taken (we can unify those cases here) then we can mark
6155 the builtin for implicit generation by GCC. */
6156 if (TREE_CODE (op0) == FUNCTION_DECL
6157 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6158 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6159 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6161 /* We use fb_either here because the C frontend sometimes takes
6162 the address of a call that returns a struct; see
6163 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6164 the implied temporary explicit. */
6166 /* Make the operand addressable. */
6167 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6168 is_gimple_addressable, fb_either);
6169 if (ret == GS_ERROR)
6170 break;
6172 /* Then mark it. Beware that it may not be possible to do so directly
6173 if a temporary has been created by the gimplification. */
6174 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6176 op0 = TREE_OPERAND (expr, 0);
6178 /* For various reasons, the gimplification of the expression
6179 may have made a new INDIRECT_REF. */
6180 if (TREE_CODE (op0) == INDIRECT_REF
6181 || (TREE_CODE (op0) == MEM_REF
6182 && integer_zerop (TREE_OPERAND (op0, 1))))
6183 goto do_indirect_ref;
6185 mark_addressable (TREE_OPERAND (expr, 0));
6187 /* The FEs may end up building ADDR_EXPRs early on a decl with
6188 an incomplete type. Re-build ADDR_EXPRs in canonical form
6189 here. */
6190 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6191 *expr_p = build_fold_addr_expr (op0);
6193 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6194 recompute_tree_invariant_for_addr_expr (*expr_p);
6196 /* If we re-built the ADDR_EXPR add a conversion to the original type
6197 if required. */
6198 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6199 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6201 break;
6204 return ret;
6207 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6208 value; output operands should be a gimple lvalue. */
6210 static enum gimplify_status
6211 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6213 tree expr;
6214 int noutputs;
6215 const char **oconstraints;
6216 int i;
6217 tree link;
6218 const char *constraint;
6219 bool allows_mem, allows_reg, is_inout;
6220 enum gimplify_status ret, tret;
6221 gasm *stmt;
6222 vec<tree, va_gc> *inputs;
6223 vec<tree, va_gc> *outputs;
6224 vec<tree, va_gc> *clobbers;
6225 vec<tree, va_gc> *labels;
6226 tree link_next;
6228 expr = *expr_p;
6229 noutputs = list_length (ASM_OUTPUTS (expr));
6230 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6232 inputs = NULL;
6233 outputs = NULL;
6234 clobbers = NULL;
6235 labels = NULL;
6237 ret = GS_ALL_DONE;
6238 link_next = NULL_TREE;
6239 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6241 bool ok;
6242 size_t constraint_len;
6244 link_next = TREE_CHAIN (link);
6246 oconstraints[i]
6247 = constraint
6248 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6249 constraint_len = strlen (constraint);
6250 if (constraint_len == 0)
6251 continue;
6253 ok = parse_output_constraint (&constraint, i, 0, 0,
6254 &allows_mem, &allows_reg, &is_inout);
6255 if (!ok)
6257 ret = GS_ERROR;
6258 is_inout = false;
6261 /* If we can't make copies, we can only accept memory.
6262 Similarly for VLAs. */
6263 tree outtype = TREE_TYPE (TREE_VALUE (link));
6264 if (outtype != error_mark_node
6265 && (TREE_ADDRESSABLE (outtype)
6266 || !COMPLETE_TYPE_P (outtype)
6267 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6269 if (allows_mem)
6270 allows_reg = 0;
6271 else
6273 error ("impossible constraint in %<asm%>");
6274 error ("non-memory output %d must stay in memory", i);
6275 return GS_ERROR;
6279 if (!allows_reg && allows_mem)
6280 mark_addressable (TREE_VALUE (link));
6282 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6283 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6284 fb_lvalue | fb_mayfail);
6285 if (tret == GS_ERROR)
6287 error ("invalid lvalue in %<asm%> output %d", i);
6288 ret = tret;
6291 /* If the constraint does not allow memory make sure we gimplify
6292 it to a register if it is not already but its base is. This
6293 happens for complex and vector components. */
6294 if (!allows_mem)
6296 tree op = TREE_VALUE (link);
6297 if (! is_gimple_val (op)
6298 && is_gimple_reg_type (TREE_TYPE (op))
6299 && is_gimple_reg (get_base_address (op)))
6301 tree tem = create_tmp_reg (TREE_TYPE (op));
6302 tree ass;
6303 if (is_inout)
6305 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6306 tem, unshare_expr (op));
6307 gimplify_and_add (ass, pre_p);
6309 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6310 gimplify_and_add (ass, post_p);
6312 TREE_VALUE (link) = tem;
6313 tret = GS_OK;
6317 vec_safe_push (outputs, link);
6318 TREE_CHAIN (link) = NULL_TREE;
6320 if (is_inout)
6322 /* An input/output operand. To give the optimizers more
6323 flexibility, split it into separate input and output
6324 operands. */
6325 tree input;
6326 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6327 char buf[11];
6329 /* Turn the in/out constraint into an output constraint. */
6330 char *p = xstrdup (constraint);
6331 p[0] = '=';
6332 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6334 /* And add a matching input constraint. */
6335 if (allows_reg)
6337 sprintf (buf, "%u", i);
6339 /* If there are multiple alternatives in the constraint,
6340 handle each of them individually. Those that allow register
6341 will be replaced with operand number, the others will stay
6342 unchanged. */
6343 if (strchr (p, ',') != NULL)
6345 size_t len = 0, buflen = strlen (buf);
6346 char *beg, *end, *str, *dst;
6348 for (beg = p + 1;;)
6350 end = strchr (beg, ',');
6351 if (end == NULL)
6352 end = strchr (beg, '\0');
6353 if ((size_t) (end - beg) < buflen)
6354 len += buflen + 1;
6355 else
6356 len += end - beg + 1;
6357 if (*end)
6358 beg = end + 1;
6359 else
6360 break;
6363 str = (char *) alloca (len);
6364 for (beg = p + 1, dst = str;;)
6366 const char *tem;
6367 bool mem_p, reg_p, inout_p;
6369 end = strchr (beg, ',');
6370 if (end)
6371 *end = '\0';
6372 beg[-1] = '=';
6373 tem = beg - 1;
6374 parse_output_constraint (&tem, i, 0, 0,
6375 &mem_p, &reg_p, &inout_p);
6376 if (dst != str)
6377 *dst++ = ',';
6378 if (reg_p)
6380 memcpy (dst, buf, buflen);
6381 dst += buflen;
6383 else
6385 if (end)
6386 len = end - beg;
6387 else
6388 len = strlen (beg);
6389 memcpy (dst, beg, len);
6390 dst += len;
6392 if (end)
6393 beg = end + 1;
6394 else
6395 break;
6397 *dst = '\0';
6398 input = build_string (dst - str, str);
6400 else
6401 input = build_string (strlen (buf), buf);
6403 else
6404 input = build_string (constraint_len - 1, constraint + 1);
6406 free (p);
6408 input = build_tree_list (build_tree_list (NULL_TREE, input),
6409 unshare_expr (TREE_VALUE (link)));
6410 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6414 link_next = NULL_TREE;
6415 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6417 link_next = TREE_CHAIN (link);
6418 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6419 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6420 oconstraints, &allows_mem, &allows_reg);
6422 /* If we can't make copies, we can only accept memory. */
6423 tree intype = TREE_TYPE (TREE_VALUE (link));
6424 if (intype != error_mark_node
6425 && (TREE_ADDRESSABLE (intype)
6426 || !COMPLETE_TYPE_P (intype)
6427 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6429 if (allows_mem)
6430 allows_reg = 0;
6431 else
6433 error ("impossible constraint in %<asm%>");
6434 error ("non-memory input %d must stay in memory", i);
6435 return GS_ERROR;
6439 /* If the operand is a memory input, it should be an lvalue. */
6440 if (!allows_reg && allows_mem)
6442 tree inputv = TREE_VALUE (link);
6443 STRIP_NOPS (inputv);
6444 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6445 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6446 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6447 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6448 || TREE_CODE (inputv) == MODIFY_EXPR)
6449 TREE_VALUE (link) = error_mark_node;
6450 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6451 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6452 if (tret != GS_ERROR)
6454 /* Unlike output operands, memory inputs are not guaranteed
6455 to be lvalues by the FE, and while the expressions are
6456 marked addressable there, if it is e.g. a statement
6457 expression, temporaries in it might not end up being
6458 addressable. They might be already used in the IL and thus
6459 it is too late to make them addressable now though. */
6460 tree x = TREE_VALUE (link);
6461 while (handled_component_p (x))
6462 x = TREE_OPERAND (x, 0);
6463 if (TREE_CODE (x) == MEM_REF
6464 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6465 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6466 if ((VAR_P (x)
6467 || TREE_CODE (x) == PARM_DECL
6468 || TREE_CODE (x) == RESULT_DECL)
6469 && !TREE_ADDRESSABLE (x)
6470 && is_gimple_reg (x))
6472 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6473 input_location), 0,
6474 "memory input %d is not directly addressable",
6476 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6479 mark_addressable (TREE_VALUE (link));
6480 if (tret == GS_ERROR)
6482 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6483 "memory input %d is not directly addressable", i);
6484 ret = tret;
6487 else
6489 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6490 is_gimple_asm_val, fb_rvalue);
6491 if (tret == GS_ERROR)
6492 ret = tret;
6495 TREE_CHAIN (link) = NULL_TREE;
6496 vec_safe_push (inputs, link);
6499 link_next = NULL_TREE;
6500 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6502 link_next = TREE_CHAIN (link);
6503 TREE_CHAIN (link) = NULL_TREE;
6504 vec_safe_push (clobbers, link);
6507 link_next = NULL_TREE;
6508 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6510 link_next = TREE_CHAIN (link);
6511 TREE_CHAIN (link) = NULL_TREE;
6512 vec_safe_push (labels, link);
6515 /* Do not add ASMs with errors to the gimple IL stream. */
6516 if (ret != GS_ERROR)
6518 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6519 inputs, outputs, clobbers, labels);
6521 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6522 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6523 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6525 gimplify_seq_add_stmt (pre_p, stmt);
6528 return ret;
6531 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6532 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6533 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6534 return to this function.
6536 FIXME should we complexify the prequeue handling instead? Or use flags
6537 for all the cleanups and let the optimizer tighten them up? The current
6538 code seems pretty fragile; it will break on a cleanup within any
6539 non-conditional nesting. But any such nesting would be broken, anyway;
6540 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6541 and continues out of it. We can do that at the RTL level, though, so
6542 having an optimizer to tighten up try/finally regions would be a Good
6543 Thing. */
6545 static enum gimplify_status
6546 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6548 gimple_stmt_iterator iter;
6549 gimple_seq body_sequence = NULL;
6551 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6553 /* We only care about the number of conditions between the innermost
6554 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6555 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6556 int old_conds = gimplify_ctxp->conditions;
6557 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6558 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6559 gimplify_ctxp->conditions = 0;
6560 gimplify_ctxp->conditional_cleanups = NULL;
6561 gimplify_ctxp->in_cleanup_point_expr = true;
6563 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6565 gimplify_ctxp->conditions = old_conds;
6566 gimplify_ctxp->conditional_cleanups = old_cleanups;
6567 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6569 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6571 gimple *wce = gsi_stmt (iter);
6573 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6575 if (gsi_one_before_end_p (iter))
6577 /* Note that gsi_insert_seq_before and gsi_remove do not
6578 scan operands, unlike some other sequence mutators. */
6579 if (!gimple_wce_cleanup_eh_only (wce))
6580 gsi_insert_seq_before_without_update (&iter,
6581 gimple_wce_cleanup (wce),
6582 GSI_SAME_STMT);
6583 gsi_remove (&iter, true);
6584 break;
6586 else
6588 gtry *gtry;
6589 gimple_seq seq;
6590 enum gimple_try_flags kind;
6592 if (gimple_wce_cleanup_eh_only (wce))
6593 kind = GIMPLE_TRY_CATCH;
6594 else
6595 kind = GIMPLE_TRY_FINALLY;
6596 seq = gsi_split_seq_after (iter);
6598 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6599 /* Do not use gsi_replace here, as it may scan operands.
6600 We want to do a simple structural modification only. */
6601 gsi_set_stmt (&iter, gtry);
6602 iter = gsi_start (gtry->eval);
6605 else
6606 gsi_next (&iter);
6609 gimplify_seq_add_seq (pre_p, body_sequence);
6610 if (temp)
6612 *expr_p = temp;
6613 return GS_OK;
6615 else
6617 *expr_p = NULL;
6618 return GS_ALL_DONE;
6622 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6623 is the cleanup action required. EH_ONLY is true if the cleanup should
6624 only be executed if an exception is thrown, not on normal exit.
6625 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6626 only valid for clobbers. */
6628 static void
6629 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6630 bool force_uncond = false)
6632 gimple *wce;
6633 gimple_seq cleanup_stmts = NULL;
6635 /* Errors can result in improperly nested cleanups. Which results in
6636 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6637 if (seen_error ())
6638 return;
6640 if (gimple_conditional_context ())
6642 /* If we're in a conditional context, this is more complex. We only
6643 want to run the cleanup if we actually ran the initialization that
6644 necessitates it, but we want to run it after the end of the
6645 conditional context. So we wrap the try/finally around the
6646 condition and use a flag to determine whether or not to actually
6647 run the destructor. Thus
6649 test ? f(A()) : 0
6651 becomes (approximately)
6653 flag = 0;
6654 try {
6655 if (test) { A::A(temp); flag = 1; val = f(temp); }
6656 else { val = 0; }
6657 } finally {
6658 if (flag) A::~A(temp);
6662 if (force_uncond)
6664 gimplify_stmt (&cleanup, &cleanup_stmts);
6665 wce = gimple_build_wce (cleanup_stmts);
6666 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6668 else
6670 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6671 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6672 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6674 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6675 gimplify_stmt (&cleanup, &cleanup_stmts);
6676 wce = gimple_build_wce (cleanup_stmts);
6678 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6679 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6680 gimplify_seq_add_stmt (pre_p, ftrue);
6682 /* Because of this manipulation, and the EH edges that jump
6683 threading cannot redirect, the temporary (VAR) will appear
6684 to be used uninitialized. Don't warn. */
6685 TREE_NO_WARNING (var) = 1;
6688 else
6690 gimplify_stmt (&cleanup, &cleanup_stmts);
6691 wce = gimple_build_wce (cleanup_stmts);
6692 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6693 gimplify_seq_add_stmt (pre_p, wce);
6697 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6699 static enum gimplify_status
6700 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6702 tree targ = *expr_p;
6703 tree temp = TARGET_EXPR_SLOT (targ);
6704 tree init = TARGET_EXPR_INITIAL (targ);
6705 enum gimplify_status ret;
6707 bool unpoison_empty_seq = false;
6708 gimple_stmt_iterator unpoison_it;
6710 if (init)
6712 tree cleanup = NULL_TREE;
6714 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6715 to the temps list. Handle also variable length TARGET_EXPRs. */
6716 if (!poly_int_tree_p (DECL_SIZE (temp)))
6718 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6719 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6720 gimplify_vla_decl (temp, pre_p);
6722 else
6724 /* Save location where we need to place unpoisoning. It's possible
6725 that a variable will be converted to needs_to_live_in_memory. */
6726 unpoison_it = gsi_last (*pre_p);
6727 unpoison_empty_seq = gsi_end_p (unpoison_it);
6729 gimple_add_tmp_var (temp);
6732 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6733 expression is supposed to initialize the slot. */
6734 if (VOID_TYPE_P (TREE_TYPE (init)))
6735 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6736 else
6738 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6739 init = init_expr;
6740 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6741 init = NULL;
6742 ggc_free (init_expr);
6744 if (ret == GS_ERROR)
6746 /* PR c++/28266 Make sure this is expanded only once. */
6747 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6748 return GS_ERROR;
6750 if (init)
6751 gimplify_and_add (init, pre_p);
6753 /* If needed, push the cleanup for the temp. */
6754 if (TARGET_EXPR_CLEANUP (targ))
6756 if (CLEANUP_EH_ONLY (targ))
6757 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6758 CLEANUP_EH_ONLY (targ), pre_p);
6759 else
6760 cleanup = TARGET_EXPR_CLEANUP (targ);
6763 /* Add a clobber for the temporary going out of scope, like
6764 gimplify_bind_expr. */
6765 if (gimplify_ctxp->in_cleanup_point_expr
6766 && needs_to_live_in_memory (temp))
6768 if (flag_stack_reuse == SR_ALL)
6770 tree clobber = build_clobber (TREE_TYPE (temp));
6771 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6772 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6774 if (asan_poisoned_variables
6775 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6776 && !TREE_STATIC (temp)
6777 && dbg_cnt (asan_use_after_scope)
6778 && !gimplify_omp_ctxp)
6780 tree asan_cleanup = build_asan_poison_call_expr (temp);
6781 if (asan_cleanup)
6783 if (unpoison_empty_seq)
6784 unpoison_it = gsi_start (*pre_p);
6786 asan_poison_variable (temp, false, &unpoison_it,
6787 unpoison_empty_seq);
6788 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6792 if (cleanup)
6793 gimple_push_cleanup (temp, cleanup, false, pre_p);
6795 /* Only expand this once. */
6796 TREE_OPERAND (targ, 3) = init;
6797 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6799 else
6800 /* We should have expanded this before. */
6801 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6803 *expr_p = temp;
6804 return GS_OK;
6807 /* Gimplification of expression trees. */
6809 /* Gimplify an expression which appears at statement context. The
6810 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6811 NULL, a new sequence is allocated.
6813 Return true if we actually added a statement to the queue. */
6815 bool
6816 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6818 gimple_seq_node last;
6820 last = gimple_seq_last (*seq_p);
6821 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6822 return last != gimple_seq_last (*seq_p);
6825 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6826 to CTX. If entries already exist, force them to be some flavor of private.
6827 If there is no enclosing parallel, do nothing. */
6829 void
6830 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6832 splay_tree_node n;
6834 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6835 return;
6839 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6840 if (n != NULL)
6842 if (n->value & GOVD_SHARED)
6843 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6844 else if (n->value & GOVD_MAP)
6845 n->value |= GOVD_MAP_TO_ONLY;
6846 else
6847 return;
6849 else if ((ctx->region_type & ORT_TARGET) != 0)
6851 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6852 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6853 else
6854 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6856 else if (ctx->region_type != ORT_WORKSHARE
6857 && ctx->region_type != ORT_TASKGROUP
6858 && ctx->region_type != ORT_SIMD
6859 && ctx->region_type != ORT_ACC
6860 && !(ctx->region_type & ORT_TARGET_DATA))
6861 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6863 ctx = ctx->outer_context;
6865 while (ctx);
6868 /* Similarly for each of the type sizes of TYPE. */
6870 static void
6871 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6873 if (type == NULL || type == error_mark_node)
6874 return;
6875 type = TYPE_MAIN_VARIANT (type);
6877 if (ctx->privatized_types->add (type))
6878 return;
6880 switch (TREE_CODE (type))
6882 case INTEGER_TYPE:
6883 case ENUMERAL_TYPE:
6884 case BOOLEAN_TYPE:
6885 case REAL_TYPE:
6886 case FIXED_POINT_TYPE:
6887 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6888 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6889 break;
6891 case ARRAY_TYPE:
6892 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6893 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6894 break;
6896 case RECORD_TYPE:
6897 case UNION_TYPE:
6898 case QUAL_UNION_TYPE:
6900 tree field;
6901 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6902 if (TREE_CODE (field) == FIELD_DECL)
6904 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6905 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6908 break;
6910 case POINTER_TYPE:
6911 case REFERENCE_TYPE:
6912 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6913 break;
6915 default:
6916 break;
6919 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6920 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6921 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6924 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6926 static void
6927 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6929 splay_tree_node n;
6930 unsigned int nflags;
6931 tree t;
6933 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6934 return;
6936 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6937 there are constructors involved somewhere. Exception is a shared clause,
6938 there is nothing privatized in that case. */
6939 if ((flags & GOVD_SHARED) == 0
6940 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6941 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6942 flags |= GOVD_SEEN;
6944 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6945 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6947 /* We shouldn't be re-adding the decl with the same data
6948 sharing class. */
6949 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6950 nflags = n->value | flags;
6951 /* The only combination of data sharing classes we should see is
6952 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6953 reduction variables to be used in data sharing clauses. */
6954 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6955 || ((nflags & GOVD_DATA_SHARE_CLASS)
6956 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6957 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6958 n->value = nflags;
6959 return;
6962 /* When adding a variable-sized variable, we have to handle all sorts
6963 of additional bits of data: the pointer replacement variable, and
6964 the parameters of the type. */
6965 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6967 /* Add the pointer replacement variable as PRIVATE if the variable
6968 replacement is private, else FIRSTPRIVATE since we'll need the
6969 address of the original variable either for SHARED, or for the
6970 copy into or out of the context. */
6971 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6973 if (flags & GOVD_MAP)
6974 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6975 else if (flags & GOVD_PRIVATE)
6976 nflags = GOVD_PRIVATE;
6977 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6978 && (flags & GOVD_FIRSTPRIVATE))
6979 || (ctx->region_type == ORT_TARGET_DATA
6980 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6981 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6982 else
6983 nflags = GOVD_FIRSTPRIVATE;
6984 nflags |= flags & GOVD_SEEN;
6985 t = DECL_VALUE_EXPR (decl);
6986 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6987 t = TREE_OPERAND (t, 0);
6988 gcc_assert (DECL_P (t));
6989 omp_add_variable (ctx, t, nflags);
6992 /* Add all of the variable and type parameters (which should have
6993 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6994 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6995 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6996 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6998 /* The variable-sized variable itself is never SHARED, only some form
6999 of PRIVATE. The sharing would take place via the pointer variable
7000 which we remapped above. */
7001 if (flags & GOVD_SHARED)
7002 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7003 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7005 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7006 alloca statement we generate for the variable, so make sure it
7007 is available. This isn't automatically needed for the SHARED
7008 case, since we won't be allocating local storage then.
7009 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7010 in this case omp_notice_variable will be called later
7011 on when it is gimplified. */
7012 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7013 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7014 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7016 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7017 && lang_hooks.decls.omp_privatize_by_reference (decl))
7019 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7021 /* Similar to the direct variable sized case above, we'll need the
7022 size of references being privatized. */
7023 if ((flags & GOVD_SHARED) == 0)
7025 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7026 if (DECL_P (t))
7027 omp_notice_variable (ctx, t, true);
7031 if (n != NULL)
7032 n->value |= flags;
7033 else
7034 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7036 /* For reductions clauses in OpenACC loop directives, by default create a
7037 copy clause on the enclosing parallel construct for carrying back the
7038 results. */
7039 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7041 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7042 while (outer_ctx)
7044 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7045 if (n != NULL)
7047 /* Ignore local variables and explicitly declared clauses. */
7048 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7049 break;
7050 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7052 /* According to the OpenACC spec, such a reduction variable
7053 should already have a copy map on a kernels construct,
7054 verify that here. */
7055 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7056 && (n->value & GOVD_MAP));
7058 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7060 /* Remove firstprivate and make it a copy map. */
7061 n->value &= ~GOVD_FIRSTPRIVATE;
7062 n->value |= GOVD_MAP;
7065 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7067 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7068 GOVD_MAP | GOVD_SEEN);
7069 break;
7071 outer_ctx = outer_ctx->outer_context;
7076 /* Notice a threadprivate variable DECL used in OMP context CTX.
7077 This just prints out diagnostics about threadprivate variable uses
7078 in untied tasks. If DECL2 is non-NULL, prevent this warning
7079 on that variable. */
7081 static bool
7082 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7083 tree decl2)
7085 splay_tree_node n;
7086 struct gimplify_omp_ctx *octx;
7088 for (octx = ctx; octx; octx = octx->outer_context)
7089 if ((octx->region_type & ORT_TARGET) != 0
7090 || octx->order_concurrent)
7092 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7093 if (n == NULL)
7095 if (octx->order_concurrent)
7097 error ("threadprivate variable %qE used in a region with"
7098 " %<order(concurrent)%> clause", DECL_NAME (decl));
7099 inform (octx->location, "enclosing region");
7101 else
7103 error ("threadprivate variable %qE used in target region",
7104 DECL_NAME (decl));
7105 inform (octx->location, "enclosing target region");
7107 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7109 if (decl2)
7110 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7113 if (ctx->region_type != ORT_UNTIED_TASK)
7114 return false;
7115 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7116 if (n == NULL)
7118 error ("threadprivate variable %qE used in untied task",
7119 DECL_NAME (decl));
7120 inform (ctx->location, "enclosing task");
7121 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7123 if (decl2)
7124 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7125 return false;
7128 /* Return true if global var DECL is device resident. */
7130 static bool
7131 device_resident_p (tree decl)
7133 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7135 if (!attr)
7136 return false;
7138 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7140 tree c = TREE_VALUE (t);
7141 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7142 return true;
7145 return false;
7148 /* Return true if DECL has an ACC DECLARE attribute. */
7150 static bool
7151 is_oacc_declared (tree decl)
7153 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7154 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7155 return declared != NULL_TREE;
7158 /* Determine outer default flags for DECL mentioned in an OMP region
7159 but not declared in an enclosing clause.
7161 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7162 remapped firstprivate instead of shared. To some extent this is
7163 addressed in omp_firstprivatize_type_sizes, but not
7164 effectively. */
7166 static unsigned
7167 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7168 bool in_code, unsigned flags)
7170 enum omp_clause_default_kind default_kind = ctx->default_kind;
7171 enum omp_clause_default_kind kind;
7173 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7174 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7175 default_kind = kind;
7176 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7177 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7179 switch (default_kind)
7181 case OMP_CLAUSE_DEFAULT_NONE:
7183 const char *rtype;
7185 if (ctx->region_type & ORT_PARALLEL)
7186 rtype = "parallel";
7187 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7188 rtype = "taskloop";
7189 else if (ctx->region_type & ORT_TASK)
7190 rtype = "task";
7191 else if (ctx->region_type & ORT_TEAMS)
7192 rtype = "teams";
7193 else
7194 gcc_unreachable ();
7196 error ("%qE not specified in enclosing %qs",
7197 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7198 inform (ctx->location, "enclosing %qs", rtype);
7200 /* FALLTHRU */
7201 case OMP_CLAUSE_DEFAULT_SHARED:
7202 flags |= GOVD_SHARED;
7203 break;
7204 case OMP_CLAUSE_DEFAULT_PRIVATE:
7205 flags |= GOVD_PRIVATE;
7206 break;
7207 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7208 flags |= GOVD_FIRSTPRIVATE;
7209 break;
7210 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7211 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7212 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7213 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7215 omp_notice_variable (octx, decl, in_code);
7216 for (; octx; octx = octx->outer_context)
7218 splay_tree_node n2;
7220 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7221 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7222 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7223 continue;
7224 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7226 flags |= GOVD_FIRSTPRIVATE;
7227 goto found_outer;
7229 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7231 flags |= GOVD_SHARED;
7232 goto found_outer;
7237 if (TREE_CODE (decl) == PARM_DECL
7238 || (!is_global_var (decl)
7239 && DECL_CONTEXT (decl) == current_function_decl))
7240 flags |= GOVD_FIRSTPRIVATE;
7241 else
7242 flags |= GOVD_SHARED;
7243 found_outer:
7244 break;
7246 default:
7247 gcc_unreachable ();
7250 return flags;
7254 /* Determine outer default flags for DECL mentioned in an OACC region
7255 but not declared in an enclosing clause. */
7257 static unsigned
7258 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7260 const char *rkind;
7261 bool on_device = false;
7262 bool is_private = false;
7263 bool declared = is_oacc_declared (decl);
7264 tree type = TREE_TYPE (decl);
7266 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7267 type = TREE_TYPE (type);
7269 /* For Fortran COMMON blocks, only used variables in those blocks are
7270 transfered and remapped. The block itself will have a private clause to
7271 avoid transfering the data twice.
7272 The hook evaluates to false by default. For a variable in Fortran's COMMON
7273 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7274 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7275 the whole block. For C++ and Fortran, it can also be true under certain
7276 other conditions, if DECL_HAS_VALUE_EXPR. */
7277 if (RECORD_OR_UNION_TYPE_P (type))
7278 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7280 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7281 && is_global_var (decl)
7282 && device_resident_p (decl)
7283 && !is_private)
7285 on_device = true;
7286 flags |= GOVD_MAP_TO_ONLY;
7289 switch (ctx->region_type)
7291 case ORT_ACC_KERNELS:
7292 rkind = "kernels";
7294 if (is_private)
7295 flags |= GOVD_FIRSTPRIVATE;
7296 else if (AGGREGATE_TYPE_P (type))
7298 /* Aggregates default to 'present_or_copy', or 'present'. */
7299 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7300 flags |= GOVD_MAP;
7301 else
7302 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7304 else
7305 /* Scalars default to 'copy'. */
7306 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7308 break;
7310 case ORT_ACC_PARALLEL:
7311 case ORT_ACC_SERIAL:
7312 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7314 if (is_private)
7315 flags |= GOVD_FIRSTPRIVATE;
7316 else if (on_device || declared)
7317 flags |= GOVD_MAP;
7318 else if (AGGREGATE_TYPE_P (type))
7320 /* Aggregates default to 'present_or_copy', or 'present'. */
7321 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7322 flags |= GOVD_MAP;
7323 else
7324 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7326 else
7327 /* Scalars default to 'firstprivate'. */
7328 flags |= GOVD_FIRSTPRIVATE;
7330 break;
7332 default:
7333 gcc_unreachable ();
7336 if (DECL_ARTIFICIAL (decl))
7337 ; /* We can get compiler-generated decls, and should not complain
7338 about them. */
7339 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7341 error ("%qE not specified in enclosing OpenACC %qs construct",
7342 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7343 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7345 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7346 ; /* Handled above. */
7347 else
7348 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7350 return flags;
7353 /* Record the fact that DECL was used within the OMP context CTX.
7354 IN_CODE is true when real code uses DECL, and false when we should
7355 merely emit default(none) errors. Return true if DECL is going to
7356 be remapped and thus DECL shouldn't be gimplified into its
7357 DECL_VALUE_EXPR (if any). */
7359 static bool
7360 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7362 splay_tree_node n;
7363 unsigned flags = in_code ? GOVD_SEEN : 0;
7364 bool ret = false, shared;
7366 if (error_operand_p (decl))
7367 return false;
7369 if (ctx->region_type == ORT_NONE)
7370 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7372 if (is_global_var (decl))
7374 /* Threadprivate variables are predetermined. */
7375 if (DECL_THREAD_LOCAL_P (decl))
7376 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7378 if (DECL_HAS_VALUE_EXPR_P (decl))
7380 if (ctx->region_type & ORT_ACC)
7381 /* For OpenACC, defer expansion of value to avoid transfering
7382 privatized common block data instead of im-/explicitly transfered
7383 variables which are in common blocks. */
7385 else
7387 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7389 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7390 return omp_notice_threadprivate_variable (ctx, decl, value);
7394 if (gimplify_omp_ctxp->outer_context == NULL
7395 && VAR_P (decl)
7396 && oacc_get_fn_attrib (current_function_decl))
7398 location_t loc = DECL_SOURCE_LOCATION (decl);
7400 if (lookup_attribute ("omp declare target link",
7401 DECL_ATTRIBUTES (decl)))
7403 error_at (loc,
7404 "%qE with %<link%> clause used in %<routine%> function",
7405 DECL_NAME (decl));
7406 return false;
7408 else if (!lookup_attribute ("omp declare target",
7409 DECL_ATTRIBUTES (decl)))
7411 error_at (loc,
7412 "%qE requires a %<declare%> directive for use "
7413 "in a %<routine%> function", DECL_NAME (decl));
7414 return false;
7419 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7420 if ((ctx->region_type & ORT_TARGET) != 0)
7422 if (ctx->region_type & ORT_ACC)
7423 /* For OpenACC, as remarked above, defer expansion. */
7424 shared = false;
7425 else
7426 shared = true;
7428 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7429 if (n == NULL)
7431 unsigned nflags = flags;
7432 if ((ctx->region_type & ORT_ACC) == 0)
7434 bool is_declare_target = false;
7435 if (is_global_var (decl)
7436 && varpool_node::get_create (decl)->offloadable)
7438 struct gimplify_omp_ctx *octx;
7439 for (octx = ctx->outer_context;
7440 octx; octx = octx->outer_context)
7442 n = splay_tree_lookup (octx->variables,
7443 (splay_tree_key)decl);
7444 if (n
7445 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7446 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7447 break;
7449 is_declare_target = octx == NULL;
7451 if (!is_declare_target)
7453 int gdmk;
7454 enum omp_clause_defaultmap_kind kind;
7455 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7456 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7457 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7458 == POINTER_TYPE)))
7459 gdmk = GDMK_POINTER;
7460 else if (lang_hooks.decls.omp_scalar_p (decl))
7461 gdmk = GDMK_SCALAR;
7462 else
7463 gdmk = GDMK_AGGREGATE;
7464 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7465 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7467 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7468 nflags |= GOVD_FIRSTPRIVATE;
7469 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7470 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7471 else
7472 gcc_unreachable ();
7474 else if (ctx->defaultmap[gdmk] == 0)
7476 tree d = lang_hooks.decls.omp_report_decl (decl);
7477 error ("%qE not specified in enclosing %<target%>",
7478 DECL_NAME (d));
7479 inform (ctx->location, "enclosing %<target%>");
7481 else if (ctx->defaultmap[gdmk]
7482 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7483 nflags |= ctx->defaultmap[gdmk];
7484 else
7486 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7487 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7492 struct gimplify_omp_ctx *octx = ctx->outer_context;
7493 if ((ctx->region_type & ORT_ACC) && octx)
7495 /* Look in outer OpenACC contexts, to see if there's a
7496 data attribute for this variable. */
7497 omp_notice_variable (octx, decl, in_code);
7499 for (; octx; octx = octx->outer_context)
7501 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7502 break;
7503 splay_tree_node n2
7504 = splay_tree_lookup (octx->variables,
7505 (splay_tree_key) decl);
7506 if (n2)
7508 if (octx->region_type == ORT_ACC_HOST_DATA)
7509 error ("variable %qE declared in enclosing "
7510 "%<host_data%> region", DECL_NAME (decl));
7511 nflags |= GOVD_MAP;
7512 if (octx->region_type == ORT_ACC_DATA
7513 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7514 nflags |= GOVD_MAP_0LEN_ARRAY;
7515 goto found_outer;
7520 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7521 | GOVD_MAP_ALLOC_ONLY)) == flags)
7523 tree type = TREE_TYPE (decl);
7525 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7526 && lang_hooks.decls.omp_privatize_by_reference (decl))
7527 type = TREE_TYPE (type);
7528 if (!lang_hooks.types.omp_mappable_type (type))
7530 error ("%qD referenced in target region does not have "
7531 "a mappable type", decl);
7532 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7534 else
7536 if ((ctx->region_type & ORT_ACC) != 0)
7537 nflags = oacc_default_clause (ctx, decl, flags);
7538 else
7539 nflags |= GOVD_MAP;
7542 found_outer:
7543 omp_add_variable (ctx, decl, nflags);
7545 else
7547 /* If nothing changed, there's nothing left to do. */
7548 if ((n->value & flags) == flags)
7549 return ret;
7550 flags |= n->value;
7551 n->value = flags;
7553 goto do_outer;
7556 if (n == NULL)
7558 if (ctx->region_type == ORT_WORKSHARE
7559 || ctx->region_type == ORT_TASKGROUP
7560 || ctx->region_type == ORT_SIMD
7561 || ctx->region_type == ORT_ACC
7562 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7563 goto do_outer;
7565 flags = omp_default_clause (ctx, decl, in_code, flags);
7567 if ((flags & GOVD_PRIVATE)
7568 && lang_hooks.decls.omp_private_outer_ref (decl))
7569 flags |= GOVD_PRIVATE_OUTER_REF;
7571 omp_add_variable (ctx, decl, flags);
7573 shared = (flags & GOVD_SHARED) != 0;
7574 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7575 goto do_outer;
7578 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7579 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7580 && DECL_SIZE (decl))
7582 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7584 splay_tree_node n2;
7585 tree t = DECL_VALUE_EXPR (decl);
7586 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7587 t = TREE_OPERAND (t, 0);
7588 gcc_assert (DECL_P (t));
7589 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7590 n2->value |= GOVD_SEEN;
7592 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7593 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7594 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7595 != INTEGER_CST))
7597 splay_tree_node n2;
7598 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7599 gcc_assert (DECL_P (t));
7600 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7601 if (n2)
7602 omp_notice_variable (ctx, t, true);
7606 if (ctx->region_type & ORT_ACC)
7607 /* For OpenACC, as remarked above, defer expansion. */
7608 shared = false;
7609 else
7610 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7611 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7613 /* If nothing changed, there's nothing left to do. */
7614 if ((n->value & flags) == flags)
7615 return ret;
7616 flags |= n->value;
7617 n->value = flags;
7619 do_outer:
7620 /* If the variable is private in the current context, then we don't
7621 need to propagate anything to an outer context. */
7622 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7623 return ret;
7624 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7625 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7626 return ret;
7627 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7628 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7629 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7630 return ret;
7631 if (ctx->outer_context
7632 && omp_notice_variable (ctx->outer_context, decl, in_code))
7633 return true;
7634 return ret;
7637 /* Verify that DECL is private within CTX. If there's specific information
7638 to the contrary in the innermost scope, generate an error. */
7640 static bool
7641 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7643 splay_tree_node n;
7645 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7646 if (n != NULL)
7648 if (n->value & GOVD_SHARED)
7650 if (ctx == gimplify_omp_ctxp)
7652 if (simd)
7653 error ("iteration variable %qE is predetermined linear",
7654 DECL_NAME (decl));
7655 else
7656 error ("iteration variable %qE should be private",
7657 DECL_NAME (decl));
7658 n->value = GOVD_PRIVATE;
7659 return true;
7661 else
7662 return false;
7664 else if ((n->value & GOVD_EXPLICIT) != 0
7665 && (ctx == gimplify_omp_ctxp
7666 || (ctx->region_type == ORT_COMBINED_PARALLEL
7667 && gimplify_omp_ctxp->outer_context == ctx)))
7669 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7670 error ("iteration variable %qE should not be firstprivate",
7671 DECL_NAME (decl));
7672 else if ((n->value & GOVD_REDUCTION) != 0)
7673 error ("iteration variable %qE should not be reduction",
7674 DECL_NAME (decl));
7675 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7676 error ("iteration variable %qE should not be linear",
7677 DECL_NAME (decl));
7679 return (ctx == gimplify_omp_ctxp
7680 || (ctx->region_type == ORT_COMBINED_PARALLEL
7681 && gimplify_omp_ctxp->outer_context == ctx));
7684 if (ctx->region_type != ORT_WORKSHARE
7685 && ctx->region_type != ORT_TASKGROUP
7686 && ctx->region_type != ORT_SIMD
7687 && ctx->region_type != ORT_ACC)
7688 return false;
7689 else if (ctx->outer_context)
7690 return omp_is_private (ctx->outer_context, decl, simd);
7691 return false;
7694 /* Return true if DECL is private within a parallel region
7695 that binds to the current construct's context or in parallel
7696 region's REDUCTION clause. */
7698 static bool
7699 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7701 splay_tree_node n;
7705 ctx = ctx->outer_context;
7706 if (ctx == NULL)
7708 if (is_global_var (decl))
7709 return false;
7711 /* References might be private, but might be shared too,
7712 when checking for copyprivate, assume they might be
7713 private, otherwise assume they might be shared. */
7714 if (copyprivate)
7715 return true;
7717 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7718 return false;
7720 /* Treat C++ privatized non-static data members outside
7721 of the privatization the same. */
7722 if (omp_member_access_dummy_var (decl))
7723 return false;
7725 return true;
7728 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7730 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7731 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7732 continue;
7734 if (n != NULL)
7736 if ((n->value & GOVD_LOCAL) != 0
7737 && omp_member_access_dummy_var (decl))
7738 return false;
7739 return (n->value & GOVD_SHARED) == 0;
7742 while (ctx->region_type == ORT_WORKSHARE
7743 || ctx->region_type == ORT_TASKGROUP
7744 || ctx->region_type == ORT_SIMD
7745 || ctx->region_type == ORT_ACC);
7746 return false;
7749 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7751 static tree
7752 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7754 tree t = *tp;
7756 /* If this node has been visited, unmark it and keep looking. */
7757 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7758 return t;
7760 if (IS_TYPE_OR_DECL_P (t))
7761 *walk_subtrees = 0;
7762 return NULL_TREE;
7765 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7766 lower all the depend clauses by populating corresponding depend
7767 array. Returns 0 if there are no such depend clauses, or
7768 2 if all depend clauses should be removed, 1 otherwise. */
7770 static int
7771 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7773 tree c;
7774 gimple *g;
7775 size_t n[4] = { 0, 0, 0, 0 };
7776 bool unused[4];
7777 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7778 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7779 size_t i, j;
7780 location_t first_loc = UNKNOWN_LOCATION;
7782 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7783 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7785 switch (OMP_CLAUSE_DEPEND_KIND (c))
7787 case OMP_CLAUSE_DEPEND_IN:
7788 i = 2;
7789 break;
7790 case OMP_CLAUSE_DEPEND_OUT:
7791 case OMP_CLAUSE_DEPEND_INOUT:
7792 i = 0;
7793 break;
7794 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7795 i = 1;
7796 break;
7797 case OMP_CLAUSE_DEPEND_DEPOBJ:
7798 i = 3;
7799 break;
7800 case OMP_CLAUSE_DEPEND_SOURCE:
7801 case OMP_CLAUSE_DEPEND_SINK:
7802 continue;
7803 default:
7804 gcc_unreachable ();
7806 tree t = OMP_CLAUSE_DECL (c);
7807 if (first_loc == UNKNOWN_LOCATION)
7808 first_loc = OMP_CLAUSE_LOCATION (c);
7809 if (TREE_CODE (t) == TREE_LIST
7810 && TREE_PURPOSE (t)
7811 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7813 if (TREE_PURPOSE (t) != last_iter)
7815 tree tcnt = size_one_node;
7816 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7818 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7819 is_gimple_val, fb_rvalue) == GS_ERROR
7820 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7821 is_gimple_val, fb_rvalue) == GS_ERROR
7822 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7823 is_gimple_val, fb_rvalue) == GS_ERROR
7824 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7825 is_gimple_val, fb_rvalue)
7826 == GS_ERROR))
7827 return 2;
7828 tree var = TREE_VEC_ELT (it, 0);
7829 tree begin = TREE_VEC_ELT (it, 1);
7830 tree end = TREE_VEC_ELT (it, 2);
7831 tree step = TREE_VEC_ELT (it, 3);
7832 tree orig_step = TREE_VEC_ELT (it, 4);
7833 tree type = TREE_TYPE (var);
7834 tree stype = TREE_TYPE (step);
7835 location_t loc = DECL_SOURCE_LOCATION (var);
7836 tree endmbegin;
7837 /* Compute count for this iterator as
7838 orig_step > 0
7839 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7840 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7841 and compute product of those for the entire depend
7842 clause. */
7843 if (POINTER_TYPE_P (type))
7844 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7845 stype, end, begin);
7846 else
7847 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7848 end, begin);
7849 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7850 step,
7851 build_int_cst (stype, 1));
7852 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7853 build_int_cst (stype, 1));
7854 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7855 unshare_expr (endmbegin),
7856 stepm1);
7857 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7858 pos, step);
7859 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7860 endmbegin, stepp1);
7861 if (TYPE_UNSIGNED (stype))
7863 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7864 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7866 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7867 neg, step);
7868 step = NULL_TREE;
7869 tree cond = fold_build2_loc (loc, LT_EXPR,
7870 boolean_type_node,
7871 begin, end);
7872 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7873 build_int_cst (stype, 0));
7874 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7875 end, begin);
7876 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7877 build_int_cst (stype, 0));
7878 tree osteptype = TREE_TYPE (orig_step);
7879 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7880 orig_step,
7881 build_int_cst (osteptype, 0));
7882 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7883 cond, pos, neg);
7884 cnt = fold_convert_loc (loc, sizetype, cnt);
7885 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7886 fb_rvalue) == GS_ERROR)
7887 return 2;
7888 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7890 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7891 fb_rvalue) == GS_ERROR)
7892 return 2;
7893 last_iter = TREE_PURPOSE (t);
7894 last_count = tcnt;
7896 if (counts[i] == NULL_TREE)
7897 counts[i] = last_count;
7898 else
7899 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7900 PLUS_EXPR, counts[i], last_count);
7902 else
7903 n[i]++;
7905 for (i = 0; i < 4; i++)
7906 if (counts[i])
7907 break;
7908 if (i == 4)
7909 return 0;
7911 tree total = size_zero_node;
7912 for (i = 0; i < 4; i++)
7914 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7915 if (counts[i] == NULL_TREE)
7916 counts[i] = size_zero_node;
7917 if (n[i])
7918 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7919 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7920 fb_rvalue) == GS_ERROR)
7921 return 2;
7922 total = size_binop (PLUS_EXPR, total, counts[i]);
7925 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7926 == GS_ERROR)
7927 return 2;
7928 bool is_old = unused[1] && unused[3];
7929 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7930 size_int (is_old ? 1 : 4));
7931 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7932 tree array = create_tmp_var_raw (type);
7933 TREE_ADDRESSABLE (array) = 1;
7934 if (!poly_int_tree_p (totalpx))
7936 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7937 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7938 if (gimplify_omp_ctxp)
7940 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7941 while (ctx
7942 && (ctx->region_type == ORT_WORKSHARE
7943 || ctx->region_type == ORT_TASKGROUP
7944 || ctx->region_type == ORT_SIMD
7945 || ctx->region_type == ORT_ACC))
7946 ctx = ctx->outer_context;
7947 if (ctx)
7948 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7950 gimplify_vla_decl (array, pre_p);
7952 else
7953 gimple_add_tmp_var (array);
7954 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7955 NULL_TREE);
7956 tree tem;
7957 if (!is_old)
7959 tem = build2 (MODIFY_EXPR, void_type_node, r,
7960 build_int_cst (ptr_type_node, 0));
7961 gimplify_and_add (tem, pre_p);
7962 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7963 NULL_TREE);
7965 tem = build2 (MODIFY_EXPR, void_type_node, r,
7966 fold_convert (ptr_type_node, total));
7967 gimplify_and_add (tem, pre_p);
7968 for (i = 1; i < (is_old ? 2 : 4); i++)
7970 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7971 NULL_TREE, NULL_TREE);
7972 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7973 gimplify_and_add (tem, pre_p);
7976 tree cnts[4];
7977 for (j = 4; j; j--)
7978 if (!unused[j - 1])
7979 break;
7980 for (i = 0; i < 4; i++)
7982 if (i && (i >= j || unused[i - 1]))
7984 cnts[i] = cnts[i - 1];
7985 continue;
7987 cnts[i] = create_tmp_var (sizetype);
7988 if (i == 0)
7989 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7990 else
7992 tree t;
7993 if (is_old)
7994 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7995 else
7996 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7997 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7998 == GS_ERROR)
7999 return 2;
8000 g = gimple_build_assign (cnts[i], t);
8002 gimple_seq_add_stmt (pre_p, g);
8005 last_iter = NULL_TREE;
8006 tree last_bind = NULL_TREE;
8007 tree *last_body = NULL;
8008 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8009 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8011 switch (OMP_CLAUSE_DEPEND_KIND (c))
8013 case OMP_CLAUSE_DEPEND_IN:
8014 i = 2;
8015 break;
8016 case OMP_CLAUSE_DEPEND_OUT:
8017 case OMP_CLAUSE_DEPEND_INOUT:
8018 i = 0;
8019 break;
8020 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8021 i = 1;
8022 break;
8023 case OMP_CLAUSE_DEPEND_DEPOBJ:
8024 i = 3;
8025 break;
8026 case OMP_CLAUSE_DEPEND_SOURCE:
8027 case OMP_CLAUSE_DEPEND_SINK:
8028 continue;
8029 default:
8030 gcc_unreachable ();
8032 tree t = OMP_CLAUSE_DECL (c);
8033 if (TREE_CODE (t) == TREE_LIST
8034 && TREE_PURPOSE (t)
8035 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8037 if (TREE_PURPOSE (t) != last_iter)
8039 if (last_bind)
8040 gimplify_and_add (last_bind, pre_p);
8041 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8042 last_bind = build3 (BIND_EXPR, void_type_node,
8043 BLOCK_VARS (block), NULL, block);
8044 TREE_SIDE_EFFECTS (last_bind) = 1;
8045 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8046 tree *p = &BIND_EXPR_BODY (last_bind);
8047 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8049 tree var = TREE_VEC_ELT (it, 0);
8050 tree begin = TREE_VEC_ELT (it, 1);
8051 tree end = TREE_VEC_ELT (it, 2);
8052 tree step = TREE_VEC_ELT (it, 3);
8053 tree orig_step = TREE_VEC_ELT (it, 4);
8054 tree type = TREE_TYPE (var);
8055 location_t loc = DECL_SOURCE_LOCATION (var);
8056 /* Emit:
8057 var = begin;
8058 goto cond_label;
8059 beg_label:
8061 var = var + step;
8062 cond_label:
8063 if (orig_step > 0) {
8064 if (var < end) goto beg_label;
8065 } else {
8066 if (var > end) goto beg_label;
8068 for each iterator, with inner iterators added to
8069 the ... above. */
8070 tree beg_label = create_artificial_label (loc);
8071 tree cond_label = NULL_TREE;
8072 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8073 var, begin);
8074 append_to_statement_list_force (tem, p);
8075 tem = build_and_jump (&cond_label);
8076 append_to_statement_list_force (tem, p);
8077 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8078 append_to_statement_list (tem, p);
8079 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8080 NULL_TREE, NULL_TREE);
8081 TREE_SIDE_EFFECTS (bind) = 1;
8082 SET_EXPR_LOCATION (bind, loc);
8083 append_to_statement_list_force (bind, p);
8084 if (POINTER_TYPE_P (type))
8085 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8086 var, fold_convert_loc (loc, sizetype,
8087 step));
8088 else
8089 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8090 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8091 var, tem);
8092 append_to_statement_list_force (tem, p);
8093 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8094 append_to_statement_list (tem, p);
8095 tree cond = fold_build2_loc (loc, LT_EXPR,
8096 boolean_type_node,
8097 var, end);
8098 tree pos
8099 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8100 cond, build_and_jump (&beg_label),
8101 void_node);
8102 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8103 var, end);
8104 tree neg
8105 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8106 cond, build_and_jump (&beg_label),
8107 void_node);
8108 tree osteptype = TREE_TYPE (orig_step);
8109 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8110 orig_step,
8111 build_int_cst (osteptype, 0));
8112 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8113 cond, pos, neg);
8114 append_to_statement_list_force (tem, p);
8115 p = &BIND_EXPR_BODY (bind);
8117 last_body = p;
8119 last_iter = TREE_PURPOSE (t);
8120 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8122 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8123 0), last_body);
8124 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8126 if (error_operand_p (TREE_VALUE (t)))
8127 return 2;
8128 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8129 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8130 NULL_TREE, NULL_TREE);
8131 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8132 void_type_node, r, TREE_VALUE (t));
8133 append_to_statement_list_force (tem, last_body);
8134 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8135 void_type_node, cnts[i],
8136 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8137 append_to_statement_list_force (tem, last_body);
8138 TREE_VALUE (t) = null_pointer_node;
8140 else
8142 if (last_bind)
8144 gimplify_and_add (last_bind, pre_p);
8145 last_bind = NULL_TREE;
8147 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8149 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8150 NULL, is_gimple_val, fb_rvalue);
8151 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8153 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8154 return 2;
8155 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8156 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8157 is_gimple_val, fb_rvalue) == GS_ERROR)
8158 return 2;
8159 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8160 NULL_TREE, NULL_TREE);
8161 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8162 gimplify_and_add (tem, pre_p);
8163 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8164 size_int (1)));
8165 gimple_seq_add_stmt (pre_p, g);
8168 if (last_bind)
8169 gimplify_and_add (last_bind, pre_p);
8170 tree cond = boolean_false_node;
8171 if (is_old)
8173 if (!unused[0])
8174 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8175 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8176 size_int (2)));
8177 if (!unused[2])
8178 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8179 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8180 cnts[2],
8181 size_binop_loc (first_loc, PLUS_EXPR,
8182 totalpx,
8183 size_int (1))));
8185 else
8187 tree prev = size_int (5);
8188 for (i = 0; i < 4; i++)
8190 if (unused[i])
8191 continue;
8192 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8193 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8194 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8195 cnts[i], unshare_expr (prev)));
8198 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8199 build_call_expr_loc (first_loc,
8200 builtin_decl_explicit (BUILT_IN_TRAP),
8201 0), void_node);
8202 gimplify_and_add (tem, pre_p);
8203 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8204 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8205 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8206 OMP_CLAUSE_CHAIN (c) = *list_p;
8207 *list_p = c;
8208 return 1;
8211 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8212 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8213 the struct node to insert the new mapping after (when the struct node is
8214 initially created). PREV_NODE is the first of two or three mappings for a
8215 pointer, and is either:
8216 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8217 array section.
8218 - not the node before C. This is true when we have a reference-to-pointer
8219 type (with a mapping for the reference and for the pointer), or for
8220 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8221 If SCP is non-null, the new node is inserted before *SCP.
8222 if SCP is null, the new node is inserted before PREV_NODE.
8223 The return type is:
8224 - PREV_NODE, if SCP is non-null.
8225 - The newly-created ALLOC or RELEASE node, if SCP is null.
8226 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8227 reference to a pointer. */
8229 static tree
8230 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8231 tree prev_node, tree *scp)
8233 enum gomp_map_kind mkind
8234 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8235 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8237 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8238 tree cl = scp ? prev_node : c2;
8239 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8240 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8241 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8242 if (OMP_CLAUSE_CHAIN (prev_node) != c
8243 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8244 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8245 == GOMP_MAP_TO_PSET))
8246 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8247 else
8248 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8249 if (struct_node)
8250 OMP_CLAUSE_CHAIN (struct_node) = c2;
8252 /* We might need to create an additional mapping if we have a reference to a
8253 pointer (in C++). Don't do this if we have something other than a
8254 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8255 if (OMP_CLAUSE_CHAIN (prev_node) != c
8256 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8257 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8258 == GOMP_MAP_ALWAYS_POINTER)
8259 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8260 == GOMP_MAP_ATTACH_DETACH)))
8262 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8263 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8264 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8265 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8266 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8267 OMP_CLAUSE_CHAIN (c3) = prev_node;
8268 if (!scp)
8269 OMP_CLAUSE_CHAIN (c2) = c3;
8270 else
8271 cl = c3;
8274 if (scp)
8275 *scp = c2;
8277 return cl;
8280 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8281 and set *BITPOSP and *POFFSETP to the bit offset of the access.
8282 If BASE_REF is non-NULL and the containing object is a reference, set
8283 *BASE_REF to that reference before dereferencing the object.
8284 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8285 has array type, else return NULL. */
8287 static tree
8288 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8289 poly_offset_int *poffsetp)
8291 tree offset;
8292 poly_int64 bitsize, bitpos;
8293 machine_mode mode;
8294 int unsignedp, reversep, volatilep = 0;
8295 poly_offset_int poffset;
8297 if (base_ref)
8299 *base_ref = NULL_TREE;
8301 while (TREE_CODE (base) == ARRAY_REF)
8302 base = TREE_OPERAND (base, 0);
8304 if (TREE_CODE (base) == INDIRECT_REF)
8305 base = TREE_OPERAND (base, 0);
8307 else
8309 if (TREE_CODE (base) == ARRAY_REF)
8311 while (TREE_CODE (base) == ARRAY_REF)
8312 base = TREE_OPERAND (base, 0);
8313 if (TREE_CODE (base) != COMPONENT_REF
8314 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8315 return NULL_TREE;
8317 else if (TREE_CODE (base) == INDIRECT_REF
8318 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8319 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8320 == REFERENCE_TYPE))
8321 base = TREE_OPERAND (base, 0);
8324 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8325 &unsignedp, &reversep, &volatilep);
8327 tree orig_base = base;
8329 if ((TREE_CODE (base) == INDIRECT_REF
8330 || (TREE_CODE (base) == MEM_REF
8331 && integer_zerop (TREE_OPERAND (base, 1))))
8332 && DECL_P (TREE_OPERAND (base, 0))
8333 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8334 base = TREE_OPERAND (base, 0);
8336 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8338 if (offset)
8339 poffset = wi::to_poly_offset (offset);
8340 else
8341 poffset = 0;
8343 if (maybe_ne (bitpos, 0))
8344 poffset += bits_to_bytes_round_down (bitpos);
8346 *bitposp = bitpos;
8347 *poffsetp = poffset;
8349 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8350 if (base_ref && orig_base != base)
8351 *base_ref = orig_base;
8353 return base;
8356 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8357 and previous omp contexts. */
8359 static void
8360 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8361 enum omp_region_type region_type,
8362 enum tree_code code)
8364 struct gimplify_omp_ctx *ctx, *outer_ctx;
8365 tree c;
8366 hash_map<tree, tree> *struct_map_to_clause = NULL;
8367 hash_set<tree> *struct_deref_set = NULL;
8368 tree *prev_list_p = NULL, *orig_list_p = list_p;
8369 int handled_depend_iterators = -1;
8370 int nowait = -1;
8372 ctx = new_omp_context (region_type);
8373 ctx->code = code;
8374 outer_ctx = ctx->outer_context;
8375 if (code == OMP_TARGET)
8377 if (!lang_GNU_Fortran ())
8378 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8379 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8381 if (!lang_GNU_Fortran ())
8382 switch (code)
8384 case OMP_TARGET:
8385 case OMP_TARGET_DATA:
8386 case OMP_TARGET_ENTER_DATA:
8387 case OMP_TARGET_EXIT_DATA:
8388 case OACC_DECLARE:
8389 case OACC_HOST_DATA:
8390 case OACC_PARALLEL:
8391 case OACC_KERNELS:
8392 ctx->target_firstprivatize_array_bases = true;
8393 default:
8394 break;
8397 while ((c = *list_p) != NULL)
8399 bool remove = false;
8400 bool notice_outer = true;
8401 const char *check_non_private = NULL;
8402 unsigned int flags;
8403 tree decl;
8405 switch (OMP_CLAUSE_CODE (c))
8407 case OMP_CLAUSE_PRIVATE:
8408 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8409 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8411 flags |= GOVD_PRIVATE_OUTER_REF;
8412 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8414 else
8415 notice_outer = false;
8416 goto do_add;
8417 case OMP_CLAUSE_SHARED:
8418 flags = GOVD_SHARED | GOVD_EXPLICIT;
8419 goto do_add;
8420 case OMP_CLAUSE_FIRSTPRIVATE:
8421 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8422 check_non_private = "firstprivate";
8423 goto do_add;
8424 case OMP_CLAUSE_LASTPRIVATE:
8425 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8426 switch (code)
8428 case OMP_DISTRIBUTE:
8429 error_at (OMP_CLAUSE_LOCATION (c),
8430 "conditional %<lastprivate%> clause on "
8431 "%qs construct", "distribute");
8432 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8433 break;
8434 case OMP_TASKLOOP:
8435 error_at (OMP_CLAUSE_LOCATION (c),
8436 "conditional %<lastprivate%> clause on "
8437 "%qs construct", "taskloop");
8438 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8439 break;
8440 default:
8441 break;
8443 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8444 if (code != OMP_LOOP)
8445 check_non_private = "lastprivate";
8446 decl = OMP_CLAUSE_DECL (c);
8447 if (error_operand_p (decl))
8448 goto do_add;
8449 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8450 && !lang_hooks.decls.omp_scalar_p (decl))
8452 error_at (OMP_CLAUSE_LOCATION (c),
8453 "non-scalar variable %qD in conditional "
8454 "%<lastprivate%> clause", decl);
8455 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8457 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8458 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8459 if (outer_ctx
8460 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8461 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8462 == ORT_COMBINED_TEAMS))
8463 && splay_tree_lookup (outer_ctx->variables,
8464 (splay_tree_key) decl) == NULL)
8466 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8467 if (outer_ctx->outer_context)
8468 omp_notice_variable (outer_ctx->outer_context, decl, true);
8470 else if (outer_ctx
8471 && (outer_ctx->region_type & ORT_TASK) != 0
8472 && outer_ctx->combined_loop
8473 && splay_tree_lookup (outer_ctx->variables,
8474 (splay_tree_key) decl) == NULL)
8476 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8477 if (outer_ctx->outer_context)
8478 omp_notice_variable (outer_ctx->outer_context, decl, true);
8480 else if (outer_ctx
8481 && (outer_ctx->region_type == ORT_WORKSHARE
8482 || outer_ctx->region_type == ORT_ACC)
8483 && outer_ctx->combined_loop
8484 && splay_tree_lookup (outer_ctx->variables,
8485 (splay_tree_key) decl) == NULL
8486 && !omp_check_private (outer_ctx, decl, false))
8488 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8489 if (outer_ctx->outer_context
8490 && (outer_ctx->outer_context->region_type
8491 == ORT_COMBINED_PARALLEL)
8492 && splay_tree_lookup (outer_ctx->outer_context->variables,
8493 (splay_tree_key) decl) == NULL)
8495 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8496 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8497 if (octx->outer_context)
8499 octx = octx->outer_context;
8500 if (octx->region_type == ORT_WORKSHARE
8501 && octx->combined_loop
8502 && splay_tree_lookup (octx->variables,
8503 (splay_tree_key) decl) == NULL
8504 && !omp_check_private (octx, decl, false))
8506 omp_add_variable (octx, decl,
8507 GOVD_LASTPRIVATE | GOVD_SEEN);
8508 octx = octx->outer_context;
8509 if (octx
8510 && ((octx->region_type & ORT_COMBINED_TEAMS)
8511 == ORT_COMBINED_TEAMS)
8512 && (splay_tree_lookup (octx->variables,
8513 (splay_tree_key) decl)
8514 == NULL))
8516 omp_add_variable (octx, decl,
8517 GOVD_SHARED | GOVD_SEEN);
8518 octx = octx->outer_context;
8521 if (octx)
8522 omp_notice_variable (octx, decl, true);
8525 else if (outer_ctx->outer_context)
8526 omp_notice_variable (outer_ctx->outer_context, decl, true);
8528 goto do_add;
8529 case OMP_CLAUSE_REDUCTION:
8530 if (OMP_CLAUSE_REDUCTION_TASK (c))
8532 if (region_type == ORT_WORKSHARE)
8534 if (nowait == -1)
8535 nowait = omp_find_clause (*list_p,
8536 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8537 if (nowait
8538 && (outer_ctx == NULL
8539 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8541 error_at (OMP_CLAUSE_LOCATION (c),
8542 "%<task%> reduction modifier on a construct "
8543 "with a %<nowait%> clause");
8544 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8547 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8549 error_at (OMP_CLAUSE_LOCATION (c),
8550 "invalid %<task%> reduction modifier on construct "
8551 "other than %<parallel%>, %<for%> or %<sections%>");
8552 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8555 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8556 switch (code)
8558 case OMP_SECTIONS:
8559 error_at (OMP_CLAUSE_LOCATION (c),
8560 "%<inscan%> %<reduction%> clause on "
8561 "%qs construct", "sections");
8562 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8563 break;
8564 case OMP_PARALLEL:
8565 error_at (OMP_CLAUSE_LOCATION (c),
8566 "%<inscan%> %<reduction%> clause on "
8567 "%qs construct", "parallel");
8568 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8569 break;
8570 case OMP_TEAMS:
8571 error_at (OMP_CLAUSE_LOCATION (c),
8572 "%<inscan%> %<reduction%> clause on "
8573 "%qs construct", "teams");
8574 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8575 break;
8576 case OMP_TASKLOOP:
8577 error_at (OMP_CLAUSE_LOCATION (c),
8578 "%<inscan%> %<reduction%> clause on "
8579 "%qs construct", "taskloop");
8580 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8581 break;
8582 default:
8583 break;
8585 /* FALLTHRU */
8586 case OMP_CLAUSE_IN_REDUCTION:
8587 case OMP_CLAUSE_TASK_REDUCTION:
8588 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8589 /* OpenACC permits reductions on private variables. */
8590 if (!(region_type & ORT_ACC)
8591 /* taskgroup is actually not a worksharing region. */
8592 && code != OMP_TASKGROUP)
8593 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8594 decl = OMP_CLAUSE_DECL (c);
8595 if (TREE_CODE (decl) == MEM_REF)
8597 tree type = TREE_TYPE (decl);
8598 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8599 NULL, is_gimple_val, fb_rvalue, false)
8600 == GS_ERROR)
8602 remove = true;
8603 break;
8605 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8606 if (DECL_P (v))
8608 omp_firstprivatize_variable (ctx, v);
8609 omp_notice_variable (ctx, v, true);
8611 decl = TREE_OPERAND (decl, 0);
8612 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8614 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8615 NULL, is_gimple_val, fb_rvalue, false)
8616 == GS_ERROR)
8618 remove = true;
8619 break;
8621 v = TREE_OPERAND (decl, 1);
8622 if (DECL_P (v))
8624 omp_firstprivatize_variable (ctx, v);
8625 omp_notice_variable (ctx, v, true);
8627 decl = TREE_OPERAND (decl, 0);
8629 if (TREE_CODE (decl) == ADDR_EXPR
8630 || TREE_CODE (decl) == INDIRECT_REF)
8631 decl = TREE_OPERAND (decl, 0);
8633 goto do_add_decl;
8634 case OMP_CLAUSE_LINEAR:
8635 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8636 is_gimple_val, fb_rvalue) == GS_ERROR)
8638 remove = true;
8639 break;
8641 else
8643 if (code == OMP_SIMD
8644 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8646 struct gimplify_omp_ctx *octx = outer_ctx;
8647 if (octx
8648 && octx->region_type == ORT_WORKSHARE
8649 && octx->combined_loop
8650 && !octx->distribute)
8652 if (octx->outer_context
8653 && (octx->outer_context->region_type
8654 == ORT_COMBINED_PARALLEL))
8655 octx = octx->outer_context->outer_context;
8656 else
8657 octx = octx->outer_context;
8659 if (octx
8660 && octx->region_type == ORT_WORKSHARE
8661 && octx->combined_loop
8662 && octx->distribute)
8664 error_at (OMP_CLAUSE_LOCATION (c),
8665 "%<linear%> clause for variable other than "
8666 "loop iterator specified on construct "
8667 "combined with %<distribute%>");
8668 remove = true;
8669 break;
8672 /* For combined #pragma omp parallel for simd, need to put
8673 lastprivate and perhaps firstprivate too on the
8674 parallel. Similarly for #pragma omp for simd. */
8675 struct gimplify_omp_ctx *octx = outer_ctx;
8676 decl = NULL_TREE;
8679 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8680 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8681 break;
8682 decl = OMP_CLAUSE_DECL (c);
8683 if (error_operand_p (decl))
8685 decl = NULL_TREE;
8686 break;
8688 flags = GOVD_SEEN;
8689 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8690 flags |= GOVD_FIRSTPRIVATE;
8691 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8692 flags |= GOVD_LASTPRIVATE;
8693 if (octx
8694 && octx->region_type == ORT_WORKSHARE
8695 && octx->combined_loop)
8697 if (octx->outer_context
8698 && (octx->outer_context->region_type
8699 == ORT_COMBINED_PARALLEL))
8700 octx = octx->outer_context;
8701 else if (omp_check_private (octx, decl, false))
8702 break;
8704 else if (octx
8705 && (octx->region_type & ORT_TASK) != 0
8706 && octx->combined_loop)
8708 else if (octx
8709 && octx->region_type == ORT_COMBINED_PARALLEL
8710 && ctx->region_type == ORT_WORKSHARE
8711 && octx == outer_ctx)
8712 flags = GOVD_SEEN | GOVD_SHARED;
8713 else if (octx
8714 && ((octx->region_type & ORT_COMBINED_TEAMS)
8715 == ORT_COMBINED_TEAMS))
8716 flags = GOVD_SEEN | GOVD_SHARED;
8717 else if (octx
8718 && octx->region_type == ORT_COMBINED_TARGET)
8720 flags &= ~GOVD_LASTPRIVATE;
8721 if (flags == GOVD_SEEN)
8722 break;
8724 else
8725 break;
8726 splay_tree_node on
8727 = splay_tree_lookup (octx->variables,
8728 (splay_tree_key) decl);
8729 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8731 octx = NULL;
8732 break;
8734 omp_add_variable (octx, decl, flags);
8735 if (octx->outer_context == NULL)
8736 break;
8737 octx = octx->outer_context;
8739 while (1);
8740 if (octx
8741 && decl
8742 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8743 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8744 omp_notice_variable (octx, decl, true);
8746 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8747 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8748 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8750 notice_outer = false;
8751 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8753 goto do_add;
8755 case OMP_CLAUSE_MAP:
8756 decl = OMP_CLAUSE_DECL (c);
8757 if (error_operand_p (decl))
8758 remove = true;
8759 switch (code)
8761 case OMP_TARGET:
8762 break;
8763 case OACC_DATA:
8764 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8765 break;
8766 /* FALLTHRU */
8767 case OMP_TARGET_DATA:
8768 case OMP_TARGET_ENTER_DATA:
8769 case OMP_TARGET_EXIT_DATA:
8770 case OACC_HOST_DATA:
8771 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8772 || (OMP_CLAUSE_MAP_KIND (c)
8773 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8774 /* For target {,enter ,exit }data only the array slice is
8775 mapped, but not the pointer to it. */
8776 remove = true;
8777 break;
8778 case OACC_ENTER_DATA:
8779 case OACC_EXIT_DATA:
8780 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8781 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET
8782 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8783 || (OMP_CLAUSE_MAP_KIND (c)
8784 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8785 remove = true;
8786 break;
8787 default:
8788 break;
8790 /* For Fortran, not only the pointer to the data is mapped but also
8791 the address of the pointer, the array descriptor etc.; for
8792 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8793 does not make sense. Likewise, for 'update' only transferring the
8794 data itself is needed as the rest has been handled in previous
8795 directives. However, for 'exit data', the array descriptor needs
8796 to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE. */
8797 if (code == OMP_TARGET_EXIT_DATA
8798 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
8799 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
8800 == GOMP_MAP_DELETE
8801 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
8802 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8803 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8804 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8805 remove = true;
8807 if (remove)
8808 break;
8809 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8811 struct gimplify_omp_ctx *octx;
8812 for (octx = outer_ctx; octx; octx = octx->outer_context)
8814 if (octx->region_type != ORT_ACC_HOST_DATA)
8815 break;
8816 splay_tree_node n2
8817 = splay_tree_lookup (octx->variables,
8818 (splay_tree_key) decl);
8819 if (n2)
8820 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8821 "declared in enclosing %<host_data%> region",
8822 DECL_NAME (decl));
8825 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8826 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8827 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8828 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8829 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8831 remove = true;
8832 break;
8834 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8835 || (OMP_CLAUSE_MAP_KIND (c)
8836 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8837 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8839 OMP_CLAUSE_SIZE (c)
8840 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8841 false);
8842 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8843 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8845 if (!DECL_P (decl))
8847 tree d = decl, *pd;
8848 if (TREE_CODE (d) == ARRAY_REF)
8850 while (TREE_CODE (d) == ARRAY_REF)
8851 d = TREE_OPERAND (d, 0);
8852 if (TREE_CODE (d) == COMPONENT_REF
8853 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8854 decl = d;
8856 pd = &OMP_CLAUSE_DECL (c);
8857 if (d == decl
8858 && TREE_CODE (decl) == INDIRECT_REF
8859 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8860 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8861 == REFERENCE_TYPE))
8863 pd = &TREE_OPERAND (decl, 0);
8864 decl = TREE_OPERAND (decl, 0);
8866 bool indir_p = false;
8867 tree orig_decl = decl;
8868 tree decl_ref = NULL_TREE;
8869 if ((region_type & ORT_ACC) != 0
8870 && TREE_CODE (*pd) == COMPONENT_REF
8871 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
8872 && code != OACC_UPDATE)
8874 while (TREE_CODE (decl) == COMPONENT_REF)
8876 decl = TREE_OPERAND (decl, 0);
8877 if ((TREE_CODE (decl) == MEM_REF
8878 && integer_zerop (TREE_OPERAND (decl, 1)))
8879 || INDIRECT_REF_P (decl))
8881 indir_p = true;
8882 decl = TREE_OPERAND (decl, 0);
8884 if (TREE_CODE (decl) == INDIRECT_REF
8885 && DECL_P (TREE_OPERAND (decl, 0))
8886 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8887 == REFERENCE_TYPE))
8889 decl_ref = decl;
8890 decl = TREE_OPERAND (decl, 0);
8894 else if (TREE_CODE (decl) == COMPONENT_REF)
8896 while (TREE_CODE (decl) == COMPONENT_REF)
8897 decl = TREE_OPERAND (decl, 0);
8898 if (TREE_CODE (decl) == INDIRECT_REF
8899 && DECL_P (TREE_OPERAND (decl, 0))
8900 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8901 == REFERENCE_TYPE))
8902 decl = TREE_OPERAND (decl, 0);
8904 if (decl != orig_decl && DECL_P (decl) && indir_p)
8906 gomp_map_kind k = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
8907 : GOMP_MAP_ATTACH;
8908 /* We have a dereference of a struct member. Make this an
8909 attach/detach operation, and ensure the base pointer is
8910 mapped as a FIRSTPRIVATE_POINTER. */
8911 OMP_CLAUSE_SET_MAP_KIND (c, k);
8912 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
8913 tree next_clause = OMP_CLAUSE_CHAIN (c);
8914 if (k == GOMP_MAP_ATTACH
8915 && code != OACC_ENTER_DATA
8916 && (!next_clause
8917 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
8918 || (OMP_CLAUSE_MAP_KIND (next_clause)
8919 != GOMP_MAP_POINTER)
8920 || OMP_CLAUSE_DECL (next_clause) != decl)
8921 && (!struct_deref_set
8922 || !struct_deref_set->contains (decl)))
8924 if (!struct_deref_set)
8925 struct_deref_set = new hash_set<tree> ();
8926 /* As well as the attach, we also need a
8927 FIRSTPRIVATE_POINTER clause to properly map the
8928 pointer to the struct base. */
8929 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8930 OMP_CLAUSE_MAP);
8931 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
8932 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
8933 = 1;
8934 tree charptr_zero
8935 = build_int_cst (build_pointer_type (char_type_node),
8937 OMP_CLAUSE_DECL (c2)
8938 = build2 (MEM_REF, char_type_node,
8939 decl_ref ? decl_ref : decl, charptr_zero);
8940 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8941 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8942 OMP_CLAUSE_MAP);
8943 OMP_CLAUSE_SET_MAP_KIND (c3,
8944 GOMP_MAP_FIRSTPRIVATE_POINTER);
8945 OMP_CLAUSE_DECL (c3) = decl;
8946 OMP_CLAUSE_SIZE (c3) = size_zero_node;
8947 tree mapgrp = *prev_list_p;
8948 *prev_list_p = c2;
8949 OMP_CLAUSE_CHAIN (c3) = mapgrp;
8950 OMP_CLAUSE_CHAIN (c2) = c3;
8952 struct_deref_set->add (decl);
8954 goto do_add_decl;
8956 /* An "attach/detach" operation on an update directive should
8957 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
8958 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8959 depends on the previous mapping. */
8960 if (code == OACC_UPDATE
8961 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8962 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
8963 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8964 == GS_ERROR)
8966 remove = true;
8967 break;
8969 if (DECL_P (decl)
8970 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
8971 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
8972 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
8973 && code != OACC_UPDATE)
8975 if (error_operand_p (decl))
8977 remove = true;
8978 break;
8981 tree stype = TREE_TYPE (decl);
8982 if (TREE_CODE (stype) == REFERENCE_TYPE)
8983 stype = TREE_TYPE (stype);
8984 if (TYPE_SIZE_UNIT (stype) == NULL
8985 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8987 error_at (OMP_CLAUSE_LOCATION (c),
8988 "mapping field %qE of variable length "
8989 "structure", OMP_CLAUSE_DECL (c));
8990 remove = true;
8991 break;
8994 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
8995 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8997 /* Error recovery. */
8998 if (prev_list_p == NULL)
9000 remove = true;
9001 break;
9003 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9005 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9006 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9008 remove = true;
9009 break;
9014 poly_offset_int offset1;
9015 poly_int64 bitpos1;
9016 tree base_ref;
9018 tree base
9019 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9020 &bitpos1, &offset1);
9022 gcc_assert (base == decl);
9024 splay_tree_node n
9025 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9026 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9027 == GOMP_MAP_ALWAYS_POINTER);
9028 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9029 == GOMP_MAP_ATTACH_DETACH);
9030 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9031 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9032 bool has_attachments = false;
9033 /* For OpenACC, pointers in structs should trigger an
9034 attach action. */
9035 if (attach_detach && (region_type & ORT_ACC) != 0)
9037 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9038 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9039 have detected a case that needs a GOMP_MAP_STRUCT
9040 mapping added. */
9041 gomp_map_kind k
9042 = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
9043 : GOMP_MAP_ATTACH;
9044 OMP_CLAUSE_SET_MAP_KIND (c, k);
9045 has_attachments = true;
9047 if (n == NULL || (n->value & GOVD_MAP) == 0)
9049 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9050 OMP_CLAUSE_MAP);
9051 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9052 : GOMP_MAP_STRUCT;
9054 OMP_CLAUSE_SET_MAP_KIND (l, k);
9055 if (base_ref)
9056 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9057 else
9058 OMP_CLAUSE_DECL (l) = decl;
9059 OMP_CLAUSE_SIZE (l)
9060 = (!attach
9061 ? size_int (1)
9062 : DECL_P (OMP_CLAUSE_DECL (l))
9063 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9064 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9065 if (struct_map_to_clause == NULL)
9066 struct_map_to_clause = new hash_map<tree, tree>;
9067 struct_map_to_clause->put (decl, l);
9068 if (ptr || attach_detach)
9070 insert_struct_comp_map (code, c, l, *prev_list_p,
9071 NULL);
9072 *prev_list_p = l;
9073 prev_list_p = NULL;
9075 else
9077 OMP_CLAUSE_CHAIN (l) = c;
9078 *list_p = l;
9079 list_p = &OMP_CLAUSE_CHAIN (l);
9081 if (base_ref && code == OMP_TARGET)
9083 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9084 OMP_CLAUSE_MAP);
9085 enum gomp_map_kind mkind
9086 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9087 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9088 OMP_CLAUSE_DECL (c2) = decl;
9089 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9090 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9091 OMP_CLAUSE_CHAIN (l) = c2;
9093 flags = GOVD_MAP | GOVD_EXPLICIT;
9094 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9095 || ptr
9096 || attach_detach)
9097 flags |= GOVD_SEEN;
9098 if (has_attachments)
9099 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9100 goto do_add_decl;
9102 else if (struct_map_to_clause)
9104 tree *osc = struct_map_to_clause->get (decl);
9105 tree *sc = NULL, *scp = NULL;
9106 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9107 || ptr
9108 || attach_detach)
9109 n->value |= GOVD_SEEN;
9110 sc = &OMP_CLAUSE_CHAIN (*osc);
9111 if (*sc != c
9112 && (OMP_CLAUSE_MAP_KIND (*sc)
9113 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9114 sc = &OMP_CLAUSE_CHAIN (*sc);
9115 /* Here "prev_list_p" is the end of the inserted
9116 alloc/release nodes after the struct node, OSC. */
9117 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9118 if ((ptr || attach_detach) && sc == prev_list_p)
9119 break;
9120 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9121 != COMPONENT_REF
9122 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9123 != INDIRECT_REF)
9124 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9125 != ARRAY_REF))
9126 break;
9127 else
9129 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9130 poly_offset_int offsetn;
9131 poly_int64 bitposn;
9132 tree base
9133 = extract_base_bit_offset (sc_decl, NULL,
9134 &bitposn, &offsetn);
9135 if (base != decl)
9136 break;
9137 if (scp)
9138 continue;
9139 tree d1 = OMP_CLAUSE_DECL (*sc);
9140 tree d2 = OMP_CLAUSE_DECL (c);
9141 while (TREE_CODE (d1) == ARRAY_REF)
9142 d1 = TREE_OPERAND (d1, 0);
9143 while (TREE_CODE (d2) == ARRAY_REF)
9144 d2 = TREE_OPERAND (d2, 0);
9145 if (TREE_CODE (d1) == INDIRECT_REF)
9146 d1 = TREE_OPERAND (d1, 0);
9147 if (TREE_CODE (d2) == INDIRECT_REF)
9148 d2 = TREE_OPERAND (d2, 0);
9149 while (TREE_CODE (d1) == COMPONENT_REF)
9150 if (TREE_CODE (d2) == COMPONENT_REF
9151 && TREE_OPERAND (d1, 1)
9152 == TREE_OPERAND (d2, 1))
9154 d1 = TREE_OPERAND (d1, 0);
9155 d2 = TREE_OPERAND (d2, 0);
9157 else
9158 break;
9159 if (d1 == d2)
9161 error_at (OMP_CLAUSE_LOCATION (c),
9162 "%qE appears more than once in map "
9163 "clauses", OMP_CLAUSE_DECL (c));
9164 remove = true;
9165 break;
9167 if (maybe_lt (offset1, offsetn)
9168 || (known_eq (offset1, offsetn)
9169 && maybe_lt (bitpos1, bitposn)))
9171 if (ptr || attach_detach)
9172 scp = sc;
9173 else
9174 break;
9177 if (remove)
9178 break;
9179 if (!attach)
9180 OMP_CLAUSE_SIZE (*osc)
9181 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9182 size_one_node);
9183 if (ptr || attach_detach)
9185 tree cl = insert_struct_comp_map (code, c, NULL,
9186 *prev_list_p, scp);
9187 if (sc == prev_list_p)
9189 *sc = cl;
9190 prev_list_p = NULL;
9192 else
9194 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9195 list_p = prev_list_p;
9196 prev_list_p = NULL;
9197 OMP_CLAUSE_CHAIN (c) = *sc;
9198 *sc = cl;
9199 continue;
9202 else if (*sc != c)
9204 *list_p = OMP_CLAUSE_CHAIN (c);
9205 OMP_CLAUSE_CHAIN (c) = *sc;
9206 *sc = c;
9207 continue;
9211 if (!remove
9212 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9213 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9214 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9215 && OMP_CLAUSE_CHAIN (c)
9216 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9217 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9218 == GOMP_MAP_ALWAYS_POINTER)
9219 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9220 == GOMP_MAP_ATTACH_DETACH)
9221 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9222 == GOMP_MAP_TO_PSET)))
9223 prev_list_p = list_p;
9225 break;
9227 flags = GOVD_MAP | GOVD_EXPLICIT;
9228 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9229 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9230 flags |= GOVD_MAP_ALWAYS_TO;
9231 goto do_add;
9233 case OMP_CLAUSE_DEPEND:
9234 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9236 tree deps = OMP_CLAUSE_DECL (c);
9237 while (deps && TREE_CODE (deps) == TREE_LIST)
9239 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9240 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9241 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9242 pre_p, NULL, is_gimple_val, fb_rvalue);
9243 deps = TREE_CHAIN (deps);
9245 break;
9247 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9248 break;
9249 if (handled_depend_iterators == -1)
9250 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9251 if (handled_depend_iterators)
9253 if (handled_depend_iterators == 2)
9254 remove = true;
9255 break;
9257 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9259 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9260 NULL, is_gimple_val, fb_rvalue);
9261 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9263 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9265 remove = true;
9266 break;
9268 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9269 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9270 is_gimple_val, fb_rvalue) == GS_ERROR)
9272 remove = true;
9273 break;
9275 break;
9277 case OMP_CLAUSE_TO:
9278 case OMP_CLAUSE_FROM:
9279 case OMP_CLAUSE__CACHE_:
9280 decl = OMP_CLAUSE_DECL (c);
9281 if (error_operand_p (decl))
9283 remove = true;
9284 break;
9286 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9287 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9288 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9289 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9290 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9292 remove = true;
9293 break;
9295 if (!DECL_P (decl))
9297 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9298 NULL, is_gimple_lvalue, fb_lvalue)
9299 == GS_ERROR)
9301 remove = true;
9302 break;
9304 break;
9306 goto do_notice;
9308 case OMP_CLAUSE_USE_DEVICE_PTR:
9309 case OMP_CLAUSE_USE_DEVICE_ADDR:
9310 flags = GOVD_EXPLICIT;
9311 goto do_add;
9313 case OMP_CLAUSE_IS_DEVICE_PTR:
9314 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9315 goto do_add;
9317 do_add:
9318 decl = OMP_CLAUSE_DECL (c);
9319 do_add_decl:
9320 if (error_operand_p (decl))
9322 remove = true;
9323 break;
9325 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9327 tree t = omp_member_access_dummy_var (decl);
9328 if (t)
9330 tree v = DECL_VALUE_EXPR (decl);
9331 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9332 if (outer_ctx)
9333 omp_notice_variable (outer_ctx, t, true);
9336 if (code == OACC_DATA
9337 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9338 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9339 flags |= GOVD_MAP_0LEN_ARRAY;
9340 omp_add_variable (ctx, decl, flags);
9341 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9342 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9343 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9344 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9346 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9347 GOVD_LOCAL | GOVD_SEEN);
9348 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9349 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9350 find_decl_expr,
9351 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9352 NULL) == NULL_TREE)
9353 omp_add_variable (ctx,
9354 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9355 GOVD_LOCAL | GOVD_SEEN);
9356 gimplify_omp_ctxp = ctx;
9357 push_gimplify_context ();
9359 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9360 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9362 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9363 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9364 pop_gimplify_context
9365 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9366 push_gimplify_context ();
9367 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9368 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9369 pop_gimplify_context
9370 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9371 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9372 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9374 gimplify_omp_ctxp = outer_ctx;
9376 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9377 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9379 gimplify_omp_ctxp = ctx;
9380 push_gimplify_context ();
9381 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9383 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9384 NULL, NULL);
9385 TREE_SIDE_EFFECTS (bind) = 1;
9386 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9387 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9389 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9390 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9391 pop_gimplify_context
9392 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9393 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9395 gimplify_omp_ctxp = outer_ctx;
9397 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9398 && OMP_CLAUSE_LINEAR_STMT (c))
9400 gimplify_omp_ctxp = ctx;
9401 push_gimplify_context ();
9402 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9404 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9405 NULL, NULL);
9406 TREE_SIDE_EFFECTS (bind) = 1;
9407 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9408 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9410 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9411 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9412 pop_gimplify_context
9413 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9414 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9416 gimplify_omp_ctxp = outer_ctx;
9418 if (notice_outer)
9419 goto do_notice;
9420 break;
9422 case OMP_CLAUSE_COPYIN:
9423 case OMP_CLAUSE_COPYPRIVATE:
9424 decl = OMP_CLAUSE_DECL (c);
9425 if (error_operand_p (decl))
9427 remove = true;
9428 break;
9430 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9431 && !remove
9432 && !omp_check_private (ctx, decl, true))
9434 remove = true;
9435 if (is_global_var (decl))
9437 if (DECL_THREAD_LOCAL_P (decl))
9438 remove = false;
9439 else if (DECL_HAS_VALUE_EXPR_P (decl))
9441 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9443 if (value
9444 && DECL_P (value)
9445 && DECL_THREAD_LOCAL_P (value))
9446 remove = false;
9449 if (remove)
9450 error_at (OMP_CLAUSE_LOCATION (c),
9451 "copyprivate variable %qE is not threadprivate"
9452 " or private in outer context", DECL_NAME (decl));
9454 do_notice:
9455 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9456 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9457 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9458 && outer_ctx
9459 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9460 || (region_type == ORT_WORKSHARE
9461 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9462 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9463 || code == OMP_LOOP)))
9464 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9465 || (code == OMP_LOOP
9466 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9467 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9468 == ORT_COMBINED_TEAMS))))
9470 splay_tree_node on
9471 = splay_tree_lookup (outer_ctx->variables,
9472 (splay_tree_key)decl);
9473 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9475 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9476 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9477 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9478 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9479 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9480 == POINTER_TYPE))))
9481 omp_firstprivatize_variable (outer_ctx, decl);
9482 else
9484 omp_add_variable (outer_ctx, decl,
9485 GOVD_SEEN | GOVD_SHARED);
9486 if (outer_ctx->outer_context)
9487 omp_notice_variable (outer_ctx->outer_context, decl,
9488 true);
9492 if (outer_ctx)
9493 omp_notice_variable (outer_ctx, decl, true);
9494 if (check_non_private
9495 && region_type == ORT_WORKSHARE
9496 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9497 || decl == OMP_CLAUSE_DECL (c)
9498 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9499 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9500 == ADDR_EXPR
9501 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9502 == POINTER_PLUS_EXPR
9503 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9504 (OMP_CLAUSE_DECL (c), 0), 0))
9505 == ADDR_EXPR)))))
9506 && omp_check_private (ctx, decl, false))
9508 error ("%s variable %qE is private in outer context",
9509 check_non_private, DECL_NAME (decl));
9510 remove = true;
9512 break;
9514 case OMP_CLAUSE_IF:
9515 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9516 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9518 const char *p[2];
9519 for (int i = 0; i < 2; i++)
9520 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9522 case VOID_CST: p[i] = "cancel"; break;
9523 case OMP_PARALLEL: p[i] = "parallel"; break;
9524 case OMP_SIMD: p[i] = "simd"; break;
9525 case OMP_TASK: p[i] = "task"; break;
9526 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9527 case OMP_TARGET_DATA: p[i] = "target data"; break;
9528 case OMP_TARGET: p[i] = "target"; break;
9529 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9530 case OMP_TARGET_ENTER_DATA:
9531 p[i] = "target enter data"; break;
9532 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9533 default: gcc_unreachable ();
9535 error_at (OMP_CLAUSE_LOCATION (c),
9536 "expected %qs %<if%> clause modifier rather than %qs",
9537 p[0], p[1]);
9538 remove = true;
9540 /* Fall through. */
9542 case OMP_CLAUSE_FINAL:
9543 OMP_CLAUSE_OPERAND (c, 0)
9544 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9545 /* Fall through. */
9547 case OMP_CLAUSE_SCHEDULE:
9548 case OMP_CLAUSE_NUM_THREADS:
9549 case OMP_CLAUSE_NUM_TEAMS:
9550 case OMP_CLAUSE_THREAD_LIMIT:
9551 case OMP_CLAUSE_DIST_SCHEDULE:
9552 case OMP_CLAUSE_DEVICE:
9553 case OMP_CLAUSE_PRIORITY:
9554 case OMP_CLAUSE_GRAINSIZE:
9555 case OMP_CLAUSE_NUM_TASKS:
9556 case OMP_CLAUSE_HINT:
9557 case OMP_CLAUSE_ASYNC:
9558 case OMP_CLAUSE_WAIT:
9559 case OMP_CLAUSE_NUM_GANGS:
9560 case OMP_CLAUSE_NUM_WORKERS:
9561 case OMP_CLAUSE_VECTOR_LENGTH:
9562 case OMP_CLAUSE_WORKER:
9563 case OMP_CLAUSE_VECTOR:
9564 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9565 is_gimple_val, fb_rvalue) == GS_ERROR)
9566 remove = true;
9567 break;
9569 case OMP_CLAUSE_GANG:
9570 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9571 is_gimple_val, fb_rvalue) == GS_ERROR)
9572 remove = true;
9573 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9574 is_gimple_val, fb_rvalue) == GS_ERROR)
9575 remove = true;
9576 break;
9578 case OMP_CLAUSE_NOWAIT:
9579 nowait = 1;
9580 break;
9582 case OMP_CLAUSE_ORDERED:
9583 case OMP_CLAUSE_UNTIED:
9584 case OMP_CLAUSE_COLLAPSE:
9585 case OMP_CLAUSE_TILE:
9586 case OMP_CLAUSE_AUTO:
9587 case OMP_CLAUSE_SEQ:
9588 case OMP_CLAUSE_INDEPENDENT:
9589 case OMP_CLAUSE_MERGEABLE:
9590 case OMP_CLAUSE_PROC_BIND:
9591 case OMP_CLAUSE_SAFELEN:
9592 case OMP_CLAUSE_SIMDLEN:
9593 case OMP_CLAUSE_NOGROUP:
9594 case OMP_CLAUSE_THREADS:
9595 case OMP_CLAUSE_SIMD:
9596 case OMP_CLAUSE_BIND:
9597 case OMP_CLAUSE_IF_PRESENT:
9598 case OMP_CLAUSE_FINALIZE:
9599 break;
9601 case OMP_CLAUSE_ORDER:
9602 ctx->order_concurrent = true;
9603 break;
9605 case OMP_CLAUSE_DEFAULTMAP:
9606 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9607 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9609 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9610 gdmkmin = GDMK_SCALAR;
9611 gdmkmax = GDMK_POINTER;
9612 break;
9613 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9614 gdmkmin = gdmkmax = GDMK_SCALAR;
9615 break;
9616 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9617 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9618 break;
9619 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9620 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9621 break;
9622 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9623 gdmkmin = gdmkmax = GDMK_POINTER;
9624 break;
9625 default:
9626 gcc_unreachable ();
9628 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9629 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9631 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9632 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9633 break;
9634 case OMP_CLAUSE_DEFAULTMAP_TO:
9635 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9636 break;
9637 case OMP_CLAUSE_DEFAULTMAP_FROM:
9638 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9639 break;
9640 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9641 ctx->defaultmap[gdmk] = GOVD_MAP;
9642 break;
9643 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9644 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9645 break;
9646 case OMP_CLAUSE_DEFAULTMAP_NONE:
9647 ctx->defaultmap[gdmk] = 0;
9648 break;
9649 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9650 switch (gdmk)
9652 case GDMK_SCALAR:
9653 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9654 break;
9655 case GDMK_AGGREGATE:
9656 case GDMK_ALLOCATABLE:
9657 ctx->defaultmap[gdmk] = GOVD_MAP;
9658 break;
9659 case GDMK_POINTER:
9660 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9661 break;
9662 default:
9663 gcc_unreachable ();
9665 break;
9666 default:
9667 gcc_unreachable ();
9669 break;
9671 case OMP_CLAUSE_ALIGNED:
9672 decl = OMP_CLAUSE_DECL (c);
9673 if (error_operand_p (decl))
9675 remove = true;
9676 break;
9678 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9679 is_gimple_val, fb_rvalue) == GS_ERROR)
9681 remove = true;
9682 break;
9684 if (!is_global_var (decl)
9685 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9686 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9687 break;
9689 case OMP_CLAUSE_NONTEMPORAL:
9690 decl = OMP_CLAUSE_DECL (c);
9691 if (error_operand_p (decl))
9693 remove = true;
9694 break;
9696 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9697 break;
9699 case OMP_CLAUSE_DEFAULT:
9700 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9701 break;
9703 case OMP_CLAUSE_INCLUSIVE:
9704 case OMP_CLAUSE_EXCLUSIVE:
9705 decl = OMP_CLAUSE_DECL (c);
9707 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9708 (splay_tree_key) decl);
9709 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9711 error_at (OMP_CLAUSE_LOCATION (c),
9712 "%qD specified in %qs clause but not in %<inscan%> "
9713 "%<reduction%> clause on the containing construct",
9714 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9715 remove = true;
9717 else
9719 n->value |= GOVD_REDUCTION_INSCAN;
9720 if (outer_ctx->region_type == ORT_SIMD
9721 && outer_ctx->outer_context
9722 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9724 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9725 (splay_tree_key) decl);
9726 if (n && (n->value & GOVD_REDUCTION) != 0)
9727 n->value |= GOVD_REDUCTION_INSCAN;
9731 break;
9733 default:
9734 gcc_unreachable ();
9737 if (code == OACC_DATA
9738 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9739 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9740 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9741 remove = true;
9742 if (remove)
9743 *list_p = OMP_CLAUSE_CHAIN (c);
9744 else
9745 list_p = &OMP_CLAUSE_CHAIN (c);
9748 ctx->clauses = *orig_list_p;
9749 gimplify_omp_ctxp = ctx;
9750 if (struct_map_to_clause)
9751 delete struct_map_to_clause;
9752 if (struct_deref_set)
9753 delete struct_deref_set;
9756 /* Return true if DECL is a candidate for shared to firstprivate
9757 optimization. We only consider non-addressable scalars, not
9758 too big, and not references. */
9760 static bool
9761 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9763 if (TREE_ADDRESSABLE (decl))
9764 return false;
9765 tree type = TREE_TYPE (decl);
9766 if (!is_gimple_reg_type (type)
9767 || TREE_CODE (type) == REFERENCE_TYPE
9768 || TREE_ADDRESSABLE (type))
9769 return false;
9770 /* Don't optimize too large decls, as each thread/task will have
9771 its own. */
9772 HOST_WIDE_INT len = int_size_in_bytes (type);
9773 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9774 return false;
9775 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9776 return false;
9777 return true;
9780 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9781 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9782 GOVD_WRITTEN in outer contexts. */
9784 static void
9785 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9787 for (; ctx; ctx = ctx->outer_context)
9789 splay_tree_node n = splay_tree_lookup (ctx->variables,
9790 (splay_tree_key) decl);
9791 if (n == NULL)
9792 continue;
9793 else if (n->value & GOVD_SHARED)
9795 n->value |= GOVD_WRITTEN;
9796 return;
9798 else if (n->value & GOVD_DATA_SHARE_CLASS)
9799 return;
9803 /* Helper callback for walk_gimple_seq to discover possible stores
9804 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9805 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9806 for those. */
9808 static tree
9809 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9811 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9813 *walk_subtrees = 0;
9814 if (!wi->is_lhs)
9815 return NULL_TREE;
9817 tree op = *tp;
9820 if (handled_component_p (op))
9821 op = TREE_OPERAND (op, 0);
9822 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9823 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9824 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9825 else
9826 break;
9828 while (1);
9829 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9830 return NULL_TREE;
9832 omp_mark_stores (gimplify_omp_ctxp, op);
9833 return NULL_TREE;
9836 /* Helper callback for walk_gimple_seq to discover possible stores
9837 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9838 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9839 for those. */
9841 static tree
9842 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9843 bool *handled_ops_p,
9844 struct walk_stmt_info *wi)
9846 gimple *stmt = gsi_stmt (*gsi_p);
9847 switch (gimple_code (stmt))
9849 /* Don't recurse on OpenMP constructs for which
9850 gimplify_adjust_omp_clauses already handled the bodies,
9851 except handle gimple_omp_for_pre_body. */
9852 case GIMPLE_OMP_FOR:
9853 *handled_ops_p = true;
9854 if (gimple_omp_for_pre_body (stmt))
9855 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9856 omp_find_stores_stmt, omp_find_stores_op, wi);
9857 break;
9858 case GIMPLE_OMP_PARALLEL:
9859 case GIMPLE_OMP_TASK:
9860 case GIMPLE_OMP_SECTIONS:
9861 case GIMPLE_OMP_SINGLE:
9862 case GIMPLE_OMP_TARGET:
9863 case GIMPLE_OMP_TEAMS:
9864 case GIMPLE_OMP_CRITICAL:
9865 *handled_ops_p = true;
9866 break;
9867 default:
9868 break;
9870 return NULL_TREE;
9873 struct gimplify_adjust_omp_clauses_data
9875 tree *list_p;
9876 gimple_seq *pre_p;
9879 /* For all variables that were not actually used within the context,
9880 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9882 static int
9883 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9885 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9886 gimple_seq *pre_p
9887 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9888 tree decl = (tree) n->key;
9889 unsigned flags = n->value;
9890 enum omp_clause_code code;
9891 tree clause;
9892 bool private_debug;
9894 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9895 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
9896 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
9897 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9898 return 0;
9899 if ((flags & GOVD_SEEN) == 0)
9900 return 0;
9901 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
9902 return 0;
9903 if (flags & GOVD_DEBUG_PRIVATE)
9905 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9906 private_debug = true;
9908 else if (flags & GOVD_MAP)
9909 private_debug = false;
9910 else
9911 private_debug
9912 = lang_hooks.decls.omp_private_debug_clause (decl,
9913 !!(flags & GOVD_SHARED));
9914 if (private_debug)
9915 code = OMP_CLAUSE_PRIVATE;
9916 else if (flags & GOVD_MAP)
9918 code = OMP_CLAUSE_MAP;
9919 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9920 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9922 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9923 return 0;
9925 if (VAR_P (decl)
9926 && DECL_IN_CONSTANT_POOL (decl)
9927 && !lookup_attribute ("omp declare target",
9928 DECL_ATTRIBUTES (decl)))
9930 tree id = get_identifier ("omp declare target");
9931 DECL_ATTRIBUTES (decl)
9932 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
9933 varpool_node *node = varpool_node::get (decl);
9934 if (node)
9936 node->offloadable = 1;
9937 if (ENABLE_OFFLOADING)
9938 g->have_offload = true;
9942 else if (flags & GOVD_SHARED)
9944 if (is_global_var (decl))
9946 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9947 while (ctx != NULL)
9949 splay_tree_node on
9950 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9951 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9952 | GOVD_PRIVATE | GOVD_REDUCTION
9953 | GOVD_LINEAR | GOVD_MAP)) != 0)
9954 break;
9955 ctx = ctx->outer_context;
9957 if (ctx == NULL)
9958 return 0;
9960 code = OMP_CLAUSE_SHARED;
9962 else if (flags & GOVD_PRIVATE)
9963 code = OMP_CLAUSE_PRIVATE;
9964 else if (flags & GOVD_FIRSTPRIVATE)
9966 code = OMP_CLAUSE_FIRSTPRIVATE;
9967 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9968 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9969 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9971 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9972 "%<target%> construct", decl);
9973 return 0;
9976 else if (flags & GOVD_LASTPRIVATE)
9977 code = OMP_CLAUSE_LASTPRIVATE;
9978 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9979 return 0;
9980 else if (flags & GOVD_CONDTEMP)
9982 code = OMP_CLAUSE__CONDTEMP_;
9983 gimple_add_tmp_var (decl);
9985 else
9986 gcc_unreachable ();
9988 if (((flags & GOVD_LASTPRIVATE)
9989 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9990 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9991 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9993 tree chain = *list_p;
9994 clause = build_omp_clause (input_location, code);
9995 OMP_CLAUSE_DECL (clause) = decl;
9996 OMP_CLAUSE_CHAIN (clause) = chain;
9997 if (private_debug)
9998 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9999 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
10000 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
10001 else if (code == OMP_CLAUSE_SHARED
10002 && (flags & GOVD_WRITTEN) == 0
10003 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10004 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
10005 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
10006 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
10007 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
10009 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
10010 OMP_CLAUSE_DECL (nc) = decl;
10011 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10012 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10013 OMP_CLAUSE_DECL (clause)
10014 = build_simple_mem_ref_loc (input_location, decl);
10015 OMP_CLAUSE_DECL (clause)
10016 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
10017 build_int_cst (build_pointer_type (char_type_node), 0));
10018 OMP_CLAUSE_SIZE (clause) = size_zero_node;
10019 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10020 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
10021 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
10022 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10023 OMP_CLAUSE_CHAIN (nc) = chain;
10024 OMP_CLAUSE_CHAIN (clause) = nc;
10025 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10026 gimplify_omp_ctxp = ctx->outer_context;
10027 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
10028 pre_p, NULL, is_gimple_val, fb_rvalue);
10029 gimplify_omp_ctxp = ctx;
10031 else if (code == OMP_CLAUSE_MAP)
10033 int kind;
10034 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10035 switch (flags & (GOVD_MAP_TO_ONLY
10036 | GOVD_MAP_FORCE
10037 | GOVD_MAP_FORCE_PRESENT
10038 | GOVD_MAP_ALLOC_ONLY
10039 | GOVD_MAP_FROM_ONLY))
10041 case 0:
10042 kind = GOMP_MAP_TOFROM;
10043 break;
10044 case GOVD_MAP_FORCE:
10045 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10046 break;
10047 case GOVD_MAP_TO_ONLY:
10048 kind = GOMP_MAP_TO;
10049 break;
10050 case GOVD_MAP_FROM_ONLY:
10051 kind = GOMP_MAP_FROM;
10052 break;
10053 case GOVD_MAP_ALLOC_ONLY:
10054 kind = GOMP_MAP_ALLOC;
10055 break;
10056 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10057 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10058 break;
10059 case GOVD_MAP_FORCE_PRESENT:
10060 kind = GOMP_MAP_FORCE_PRESENT;
10061 break;
10062 default:
10063 gcc_unreachable ();
10065 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
10066 if (DECL_SIZE (decl)
10067 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10069 tree decl2 = DECL_VALUE_EXPR (decl);
10070 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10071 decl2 = TREE_OPERAND (decl2, 0);
10072 gcc_assert (DECL_P (decl2));
10073 tree mem = build_simple_mem_ref (decl2);
10074 OMP_CLAUSE_DECL (clause) = mem;
10075 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10076 if (gimplify_omp_ctxp->outer_context)
10078 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10079 omp_notice_variable (ctx, decl2, true);
10080 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10082 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10083 OMP_CLAUSE_MAP);
10084 OMP_CLAUSE_DECL (nc) = decl;
10085 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10086 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10087 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10088 else
10089 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10090 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10091 OMP_CLAUSE_CHAIN (clause) = nc;
10093 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10094 && lang_hooks.decls.omp_privatize_by_reference (decl))
10096 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10097 OMP_CLAUSE_SIZE (clause)
10098 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10099 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10100 gimplify_omp_ctxp = ctx->outer_context;
10101 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10102 pre_p, NULL, is_gimple_val, fb_rvalue);
10103 gimplify_omp_ctxp = ctx;
10104 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10105 OMP_CLAUSE_MAP);
10106 OMP_CLAUSE_DECL (nc) = decl;
10107 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10108 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10109 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10110 OMP_CLAUSE_CHAIN (clause) = nc;
10112 else
10113 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10115 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10117 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10118 OMP_CLAUSE_DECL (nc) = decl;
10119 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10120 OMP_CLAUSE_CHAIN (nc) = chain;
10121 OMP_CLAUSE_CHAIN (clause) = nc;
10122 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10123 gimplify_omp_ctxp = ctx->outer_context;
10124 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10125 gimplify_omp_ctxp = ctx;
10127 *list_p = clause;
10128 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10129 gimplify_omp_ctxp = ctx->outer_context;
10130 lang_hooks.decls.omp_finish_clause (clause, pre_p);
10131 if (gimplify_omp_ctxp)
10132 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10133 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10134 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10135 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10136 true);
10137 gimplify_omp_ctxp = ctx;
10138 return 0;
10141 static void
10142 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10143 enum tree_code code)
10145 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10146 tree *orig_list_p = list_p;
10147 tree c, decl;
10148 bool has_inscan_reductions = false;
10150 if (body)
10152 struct gimplify_omp_ctx *octx;
10153 for (octx = ctx; octx; octx = octx->outer_context)
10154 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10155 break;
10156 if (octx)
10158 struct walk_stmt_info wi;
10159 memset (&wi, 0, sizeof (wi));
10160 walk_gimple_seq (body, omp_find_stores_stmt,
10161 omp_find_stores_op, &wi);
10165 if (ctx->add_safelen1)
10167 /* If there are VLAs in the body of simd loop, prevent
10168 vectorization. */
10169 gcc_assert (ctx->region_type == ORT_SIMD);
10170 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10171 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10172 OMP_CLAUSE_CHAIN (c) = *list_p;
10173 *list_p = c;
10174 list_p = &OMP_CLAUSE_CHAIN (c);
10177 if (ctx->region_type == ORT_WORKSHARE
10178 && ctx->outer_context
10179 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10181 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10182 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10183 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10185 decl = OMP_CLAUSE_DECL (c);
10186 splay_tree_node n
10187 = splay_tree_lookup (ctx->outer_context->variables,
10188 (splay_tree_key) decl);
10189 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10190 (splay_tree_key) decl));
10191 omp_add_variable (ctx, decl, n->value);
10192 tree c2 = copy_node (c);
10193 OMP_CLAUSE_CHAIN (c2) = *list_p;
10194 *list_p = c2;
10195 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10196 continue;
10197 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10198 OMP_CLAUSE_FIRSTPRIVATE);
10199 OMP_CLAUSE_DECL (c2) = decl;
10200 OMP_CLAUSE_CHAIN (c2) = *list_p;
10201 *list_p = c2;
10204 while ((c = *list_p) != NULL)
10206 splay_tree_node n;
10207 bool remove = false;
10209 switch (OMP_CLAUSE_CODE (c))
10211 case OMP_CLAUSE_FIRSTPRIVATE:
10212 if ((ctx->region_type & ORT_TARGET)
10213 && (ctx->region_type & ORT_ACC) == 0
10214 && TYPE_ATOMIC (strip_array_types
10215 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10217 error_at (OMP_CLAUSE_LOCATION (c),
10218 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10219 "%<target%> construct", OMP_CLAUSE_DECL (c));
10220 remove = true;
10221 break;
10223 /* FALLTHRU */
10224 case OMP_CLAUSE_PRIVATE:
10225 case OMP_CLAUSE_SHARED:
10226 case OMP_CLAUSE_LINEAR:
10227 decl = OMP_CLAUSE_DECL (c);
10228 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10229 remove = !(n->value & GOVD_SEEN);
10230 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10231 && code == OMP_PARALLEL
10232 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10233 remove = true;
10234 if (! remove)
10236 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10237 if ((n->value & GOVD_DEBUG_PRIVATE)
10238 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10240 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10241 || ((n->value & GOVD_DATA_SHARE_CLASS)
10242 == GOVD_SHARED));
10243 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10244 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10246 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10247 && (n->value & GOVD_WRITTEN) == 0
10248 && DECL_P (decl)
10249 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10250 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10251 else if (DECL_P (decl)
10252 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10253 && (n->value & GOVD_WRITTEN) != 0)
10254 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10255 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10256 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10257 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10259 break;
10261 case OMP_CLAUSE_LASTPRIVATE:
10262 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10263 accurately reflect the presence of a FIRSTPRIVATE clause. */
10264 decl = OMP_CLAUSE_DECL (c);
10265 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10266 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10267 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10268 if (code == OMP_DISTRIBUTE
10269 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10271 remove = true;
10272 error_at (OMP_CLAUSE_LOCATION (c),
10273 "same variable used in %<firstprivate%> and "
10274 "%<lastprivate%> clauses on %<distribute%> "
10275 "construct");
10277 if (!remove
10278 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10279 && DECL_P (decl)
10280 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10281 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10282 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10283 remove = true;
10284 break;
10286 case OMP_CLAUSE_ALIGNED:
10287 decl = OMP_CLAUSE_DECL (c);
10288 if (!is_global_var (decl))
10290 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10291 remove = n == NULL || !(n->value & GOVD_SEEN);
10292 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10294 struct gimplify_omp_ctx *octx;
10295 if (n != NULL
10296 && (n->value & (GOVD_DATA_SHARE_CLASS
10297 & ~GOVD_FIRSTPRIVATE)))
10298 remove = true;
10299 else
10300 for (octx = ctx->outer_context; octx;
10301 octx = octx->outer_context)
10303 n = splay_tree_lookup (octx->variables,
10304 (splay_tree_key) decl);
10305 if (n == NULL)
10306 continue;
10307 if (n->value & GOVD_LOCAL)
10308 break;
10309 /* We have to avoid assigning a shared variable
10310 to itself when trying to add
10311 __builtin_assume_aligned. */
10312 if (n->value & GOVD_SHARED)
10314 remove = true;
10315 break;
10320 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10322 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10323 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10324 remove = true;
10326 break;
10328 case OMP_CLAUSE_NONTEMPORAL:
10329 decl = OMP_CLAUSE_DECL (c);
10330 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10331 remove = n == NULL || !(n->value & GOVD_SEEN);
10332 break;
10334 case OMP_CLAUSE_MAP:
10335 if (code == OMP_TARGET_EXIT_DATA
10336 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10338 remove = true;
10339 break;
10341 decl = OMP_CLAUSE_DECL (c);
10342 /* Data clauses associated with reductions must be
10343 compatible with present_or_copy. Warn and adjust the clause
10344 if that is not the case. */
10345 if (ctx->region_type == ORT_ACC_PARALLEL
10346 || ctx->region_type == ORT_ACC_SERIAL)
10348 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10349 n = NULL;
10351 if (DECL_P (t))
10352 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10354 if (n && (n->value & GOVD_REDUCTION))
10356 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10358 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10359 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10360 && kind != GOMP_MAP_FORCE_PRESENT
10361 && kind != GOMP_MAP_POINTER)
10363 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10364 "incompatible data clause with reduction "
10365 "on %qE; promoting to %<present_or_copy%>",
10366 DECL_NAME (t));
10367 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10371 if (!DECL_P (decl))
10373 if ((ctx->region_type & ORT_TARGET) != 0
10374 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10376 if (TREE_CODE (decl) == INDIRECT_REF
10377 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10378 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10379 == REFERENCE_TYPE))
10380 decl = TREE_OPERAND (decl, 0);
10381 if (TREE_CODE (decl) == COMPONENT_REF)
10383 while (TREE_CODE (decl) == COMPONENT_REF)
10384 decl = TREE_OPERAND (decl, 0);
10385 if (DECL_P (decl))
10387 n = splay_tree_lookup (ctx->variables,
10388 (splay_tree_key) decl);
10389 if (!(n->value & GOVD_SEEN))
10390 remove = true;
10394 break;
10396 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10397 if ((ctx->region_type & ORT_TARGET) != 0
10398 && !(n->value & GOVD_SEEN)
10399 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10400 && (!is_global_var (decl)
10401 || !lookup_attribute ("omp declare target link",
10402 DECL_ATTRIBUTES (decl))))
10404 remove = true;
10405 /* For struct element mapping, if struct is never referenced
10406 in target block and none of the mapping has always modifier,
10407 remove all the struct element mappings, which immediately
10408 follow the GOMP_MAP_STRUCT map clause. */
10409 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10411 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10412 while (cnt--)
10413 OMP_CLAUSE_CHAIN (c)
10414 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10417 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10418 && (code == OMP_TARGET_EXIT_DATA
10419 || code == OACC_EXIT_DATA))
10420 remove = true;
10421 else if (DECL_SIZE (decl)
10422 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10423 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10424 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10425 && (OMP_CLAUSE_MAP_KIND (c)
10426 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10428 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10429 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10430 INTEGER_CST. */
10431 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10433 tree decl2 = DECL_VALUE_EXPR (decl);
10434 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10435 decl2 = TREE_OPERAND (decl2, 0);
10436 gcc_assert (DECL_P (decl2));
10437 tree mem = build_simple_mem_ref (decl2);
10438 OMP_CLAUSE_DECL (c) = mem;
10439 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10440 if (ctx->outer_context)
10442 omp_notice_variable (ctx->outer_context, decl2, true);
10443 omp_notice_variable (ctx->outer_context,
10444 OMP_CLAUSE_SIZE (c), true);
10446 if (((ctx->region_type & ORT_TARGET) != 0
10447 || !ctx->target_firstprivatize_array_bases)
10448 && ((n->value & GOVD_SEEN) == 0
10449 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10451 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10452 OMP_CLAUSE_MAP);
10453 OMP_CLAUSE_DECL (nc) = decl;
10454 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10455 if (ctx->target_firstprivatize_array_bases)
10456 OMP_CLAUSE_SET_MAP_KIND (nc,
10457 GOMP_MAP_FIRSTPRIVATE_POINTER);
10458 else
10459 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10460 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10461 OMP_CLAUSE_CHAIN (c) = nc;
10462 c = nc;
10465 else
10467 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10468 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10469 gcc_assert ((n->value & GOVD_SEEN) == 0
10470 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10471 == 0));
10473 break;
10475 case OMP_CLAUSE_TO:
10476 case OMP_CLAUSE_FROM:
10477 case OMP_CLAUSE__CACHE_:
10478 decl = OMP_CLAUSE_DECL (c);
10479 if (!DECL_P (decl))
10480 break;
10481 if (DECL_SIZE (decl)
10482 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10484 tree decl2 = DECL_VALUE_EXPR (decl);
10485 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10486 decl2 = TREE_OPERAND (decl2, 0);
10487 gcc_assert (DECL_P (decl2));
10488 tree mem = build_simple_mem_ref (decl2);
10489 OMP_CLAUSE_DECL (c) = mem;
10490 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10491 if (ctx->outer_context)
10493 omp_notice_variable (ctx->outer_context, decl2, true);
10494 omp_notice_variable (ctx->outer_context,
10495 OMP_CLAUSE_SIZE (c), true);
10498 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10499 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10500 break;
10502 case OMP_CLAUSE_REDUCTION:
10503 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10505 decl = OMP_CLAUSE_DECL (c);
10506 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10507 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10509 remove = true;
10510 error_at (OMP_CLAUSE_LOCATION (c),
10511 "%qD specified in %<inscan%> %<reduction%> clause "
10512 "but not in %<scan%> directive clause", decl);
10513 break;
10515 has_inscan_reductions = true;
10517 /* FALLTHRU */
10518 case OMP_CLAUSE_IN_REDUCTION:
10519 case OMP_CLAUSE_TASK_REDUCTION:
10520 decl = OMP_CLAUSE_DECL (c);
10521 /* OpenACC reductions need a present_or_copy data clause.
10522 Add one if necessary. Emit error when the reduction is private. */
10523 if (ctx->region_type == ORT_ACC_PARALLEL
10524 || ctx->region_type == ORT_ACC_SERIAL)
10526 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10527 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10529 remove = true;
10530 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10531 "reduction on %qE", DECL_NAME (decl));
10533 else if ((n->value & GOVD_MAP) == 0)
10535 tree next = OMP_CLAUSE_CHAIN (c);
10536 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10537 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10538 OMP_CLAUSE_DECL (nc) = decl;
10539 OMP_CLAUSE_CHAIN (c) = nc;
10540 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10541 while (1)
10543 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10544 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10545 break;
10546 nc = OMP_CLAUSE_CHAIN (nc);
10548 OMP_CLAUSE_CHAIN (nc) = next;
10549 n->value |= GOVD_MAP;
10552 if (DECL_P (decl)
10553 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10554 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10555 break;
10556 case OMP_CLAUSE_COPYIN:
10557 case OMP_CLAUSE_COPYPRIVATE:
10558 case OMP_CLAUSE_IF:
10559 case OMP_CLAUSE_NUM_THREADS:
10560 case OMP_CLAUSE_NUM_TEAMS:
10561 case OMP_CLAUSE_THREAD_LIMIT:
10562 case OMP_CLAUSE_DIST_SCHEDULE:
10563 case OMP_CLAUSE_DEVICE:
10564 case OMP_CLAUSE_SCHEDULE:
10565 case OMP_CLAUSE_NOWAIT:
10566 case OMP_CLAUSE_ORDERED:
10567 case OMP_CLAUSE_DEFAULT:
10568 case OMP_CLAUSE_UNTIED:
10569 case OMP_CLAUSE_COLLAPSE:
10570 case OMP_CLAUSE_FINAL:
10571 case OMP_CLAUSE_MERGEABLE:
10572 case OMP_CLAUSE_PROC_BIND:
10573 case OMP_CLAUSE_SAFELEN:
10574 case OMP_CLAUSE_SIMDLEN:
10575 case OMP_CLAUSE_DEPEND:
10576 case OMP_CLAUSE_PRIORITY:
10577 case OMP_CLAUSE_GRAINSIZE:
10578 case OMP_CLAUSE_NUM_TASKS:
10579 case OMP_CLAUSE_NOGROUP:
10580 case OMP_CLAUSE_THREADS:
10581 case OMP_CLAUSE_SIMD:
10582 case OMP_CLAUSE_HINT:
10583 case OMP_CLAUSE_DEFAULTMAP:
10584 case OMP_CLAUSE_ORDER:
10585 case OMP_CLAUSE_BIND:
10586 case OMP_CLAUSE_USE_DEVICE_PTR:
10587 case OMP_CLAUSE_USE_DEVICE_ADDR:
10588 case OMP_CLAUSE_IS_DEVICE_PTR:
10589 case OMP_CLAUSE_ASYNC:
10590 case OMP_CLAUSE_WAIT:
10591 case OMP_CLAUSE_INDEPENDENT:
10592 case OMP_CLAUSE_NUM_GANGS:
10593 case OMP_CLAUSE_NUM_WORKERS:
10594 case OMP_CLAUSE_VECTOR_LENGTH:
10595 case OMP_CLAUSE_GANG:
10596 case OMP_CLAUSE_WORKER:
10597 case OMP_CLAUSE_VECTOR:
10598 case OMP_CLAUSE_AUTO:
10599 case OMP_CLAUSE_SEQ:
10600 case OMP_CLAUSE_TILE:
10601 case OMP_CLAUSE_IF_PRESENT:
10602 case OMP_CLAUSE_FINALIZE:
10603 case OMP_CLAUSE_INCLUSIVE:
10604 case OMP_CLAUSE_EXCLUSIVE:
10605 break;
10607 default:
10608 gcc_unreachable ();
10611 if (remove)
10612 *list_p = OMP_CLAUSE_CHAIN (c);
10613 else
10614 list_p = &OMP_CLAUSE_CHAIN (c);
10617 /* Add in any implicit data sharing. */
10618 struct gimplify_adjust_omp_clauses_data data;
10619 data.list_p = list_p;
10620 data.pre_p = pre_p;
10621 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10623 if (has_inscan_reductions)
10624 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10625 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10626 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10628 error_at (OMP_CLAUSE_LOCATION (c),
10629 "%<inscan%> %<reduction%> clause used together with "
10630 "%<linear%> clause for a variable other than loop "
10631 "iterator");
10632 break;
10635 gimplify_omp_ctxp = ctx->outer_context;
10636 delete_omp_context (ctx);
10639 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
10640 -1 if unknown yet (simd is involved, won't be known until vectorization)
10641 and 1 if they do. If SCORES is non-NULL, it should point to an array
10642 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
10643 of the CONSTRUCTS (position -1 if it will never match) followed by
10644 number of constructs in the OpenMP context construct trait. If the
10645 score depends on whether it will be in a declare simd clone or not,
10646 the function returns 2 and there will be two sets of the scores, the first
10647 one for the case that it is not in a declare simd clone, the other
10648 that it is in a declare simd clone. */
10651 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10652 int *scores)
10654 int matched = 0, cnt = 0;
10655 bool simd_seen = false;
10656 bool target_seen = false;
10657 int declare_simd_cnt = -1;
10658 auto_vec<enum tree_code, 16> codes;
10659 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10661 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10662 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10663 == ORT_TARGET && ctx->code == OMP_TARGET)
10664 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10665 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10666 || (ctx->region_type == ORT_SIMD
10667 && ctx->code == OMP_SIMD
10668 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10670 ++cnt;
10671 if (scores)
10672 codes.safe_push (ctx->code);
10673 else if (matched < nconstructs && ctx->code == constructs[matched])
10675 if (ctx->code == OMP_SIMD)
10677 if (matched)
10678 return 0;
10679 simd_seen = true;
10681 ++matched;
10683 if (ctx->code == OMP_TARGET)
10685 if (scores == NULL)
10686 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10687 target_seen = true;
10688 break;
10691 else if (ctx->region_type == ORT_WORKSHARE
10692 && ctx->code == OMP_LOOP
10693 && ctx->outer_context
10694 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10695 && ctx->outer_context->outer_context
10696 && ctx->outer_context->outer_context->code == OMP_LOOP
10697 && ctx->outer_context->outer_context->distribute)
10698 ctx = ctx->outer_context->outer_context;
10699 ctx = ctx->outer_context;
10701 if (!target_seen
10702 && lookup_attribute ("omp declare simd",
10703 DECL_ATTRIBUTES (current_function_decl)))
10705 /* Declare simd is a maybe case, it is supposed to be added only to the
10706 omp-simd-clone.c added clones and not to the base function. */
10707 declare_simd_cnt = cnt++;
10708 if (scores)
10709 codes.safe_push (OMP_SIMD);
10710 else if (cnt == 0
10711 && constructs[0] == OMP_SIMD)
10713 gcc_assert (matched == 0);
10714 simd_seen = true;
10715 if (++matched == nconstructs)
10716 return -1;
10719 if (tree attr = lookup_attribute ("omp declare variant variant",
10720 DECL_ATTRIBUTES (current_function_decl)))
10722 enum tree_code variant_constructs[5];
10723 int variant_nconstructs = 0;
10724 if (!target_seen)
10725 variant_nconstructs
10726 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
10727 variant_constructs);
10728 for (int i = 0; i < variant_nconstructs; i++)
10730 ++cnt;
10731 if (scores)
10732 codes.safe_push (variant_constructs[i]);
10733 else if (matched < nconstructs
10734 && variant_constructs[i] == constructs[matched])
10736 if (variant_constructs[i] == OMP_SIMD)
10738 if (matched)
10739 return 0;
10740 simd_seen = true;
10742 ++matched;
10746 if (!target_seen
10747 && lookup_attribute ("omp declare target block",
10748 DECL_ATTRIBUTES (current_function_decl)))
10750 if (scores)
10751 codes.safe_push (OMP_TARGET);
10752 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
10753 ++matched;
10755 if (scores)
10757 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
10759 int j = codes.length () - 1;
10760 for (int i = nconstructs - 1; i >= 0; i--)
10762 while (j >= 0
10763 && (pass != 0 || declare_simd_cnt != j)
10764 && constructs[i] != codes[j])
10765 --j;
10766 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
10767 *scores++ = j - 1;
10768 else
10769 *scores++ = j;
10771 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
10772 ? codes.length () - 1 : codes.length ());
10774 return declare_simd_cnt == -1 ? 1 : 2;
10776 if (matched == nconstructs)
10777 return simd_seen ? -1 : 1;
10778 return 0;
10781 /* Gimplify OACC_CACHE. */
10783 static void
10784 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10786 tree expr = *expr_p;
10788 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10789 OACC_CACHE);
10790 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10791 OACC_CACHE);
10793 /* TODO: Do something sensible with this information. */
10795 *expr_p = NULL_TREE;
10798 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10799 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10800 kind. The entry kind will replace the one in CLAUSE, while the exit
10801 kind will be used in a new omp_clause and returned to the caller. */
10803 static tree
10804 gimplify_oacc_declare_1 (tree clause)
10806 HOST_WIDE_INT kind, new_op;
10807 bool ret = false;
10808 tree c = NULL;
10810 kind = OMP_CLAUSE_MAP_KIND (clause);
10812 switch (kind)
10814 case GOMP_MAP_ALLOC:
10815 new_op = GOMP_MAP_RELEASE;
10816 ret = true;
10817 break;
10819 case GOMP_MAP_FROM:
10820 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10821 new_op = GOMP_MAP_FROM;
10822 ret = true;
10823 break;
10825 case GOMP_MAP_TOFROM:
10826 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10827 new_op = GOMP_MAP_FROM;
10828 ret = true;
10829 break;
10831 case GOMP_MAP_DEVICE_RESIDENT:
10832 case GOMP_MAP_FORCE_DEVICEPTR:
10833 case GOMP_MAP_FORCE_PRESENT:
10834 case GOMP_MAP_LINK:
10835 case GOMP_MAP_POINTER:
10836 case GOMP_MAP_TO:
10837 break;
10839 default:
10840 gcc_unreachable ();
10841 break;
10844 if (ret)
10846 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10847 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10848 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10851 return c;
10854 /* Gimplify OACC_DECLARE. */
10856 static void
10857 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10859 tree expr = *expr_p;
10860 gomp_target *stmt;
10861 tree clauses, t, decl;
10863 clauses = OACC_DECLARE_CLAUSES (expr);
10865 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10866 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10868 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10870 decl = OMP_CLAUSE_DECL (t);
10872 if (TREE_CODE (decl) == MEM_REF)
10873 decl = TREE_OPERAND (decl, 0);
10875 if (VAR_P (decl) && !is_oacc_declared (decl))
10877 tree attr = get_identifier ("oacc declare target");
10878 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10879 DECL_ATTRIBUTES (decl));
10882 if (VAR_P (decl)
10883 && !is_global_var (decl)
10884 && DECL_CONTEXT (decl) == current_function_decl)
10886 tree c = gimplify_oacc_declare_1 (t);
10887 if (c)
10889 if (oacc_declare_returns == NULL)
10890 oacc_declare_returns = new hash_map<tree, tree>;
10892 oacc_declare_returns->put (decl, c);
10896 if (gimplify_omp_ctxp)
10897 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10900 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10901 clauses);
10903 gimplify_seq_add_stmt (pre_p, stmt);
10905 *expr_p = NULL_TREE;
10908 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10909 gimplification of the body, as well as scanning the body for used
10910 variables. We need to do this scan now, because variable-sized
10911 decls will be decomposed during gimplification. */
10913 static void
10914 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10916 tree expr = *expr_p;
10917 gimple *g;
10918 gimple_seq body = NULL;
10920 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10921 OMP_PARALLEL_COMBINED (expr)
10922 ? ORT_COMBINED_PARALLEL
10923 : ORT_PARALLEL, OMP_PARALLEL);
10925 push_gimplify_context ();
10927 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10928 if (gimple_code (g) == GIMPLE_BIND)
10929 pop_gimplify_context (g);
10930 else
10931 pop_gimplify_context (NULL);
10933 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10934 OMP_PARALLEL);
10936 g = gimple_build_omp_parallel (body,
10937 OMP_PARALLEL_CLAUSES (expr),
10938 NULL_TREE, NULL_TREE);
10939 if (OMP_PARALLEL_COMBINED (expr))
10940 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10941 gimplify_seq_add_stmt (pre_p, g);
10942 *expr_p = NULL_TREE;
10945 /* Gimplify the contents of an OMP_TASK statement. This involves
10946 gimplification of the body, as well as scanning the body for used
10947 variables. We need to do this scan now, because variable-sized
10948 decls will be decomposed during gimplification. */
10950 static void
10951 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10953 tree expr = *expr_p;
10954 gimple *g;
10955 gimple_seq body = NULL;
10957 if (OMP_TASK_BODY (expr) == NULL_TREE)
10958 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10959 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10960 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10962 error_at (OMP_CLAUSE_LOCATION (c),
10963 "%<mutexinoutset%> kind in %<depend%> clause on a "
10964 "%<taskwait%> construct");
10965 break;
10968 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10969 omp_find_clause (OMP_TASK_CLAUSES (expr),
10970 OMP_CLAUSE_UNTIED)
10971 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10973 if (OMP_TASK_BODY (expr))
10975 push_gimplify_context ();
10977 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10978 if (gimple_code (g) == GIMPLE_BIND)
10979 pop_gimplify_context (g);
10980 else
10981 pop_gimplify_context (NULL);
10984 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10985 OMP_TASK);
10987 g = gimple_build_omp_task (body,
10988 OMP_TASK_CLAUSES (expr),
10989 NULL_TREE, NULL_TREE,
10990 NULL_TREE, NULL_TREE, NULL_TREE);
10991 if (OMP_TASK_BODY (expr) == NULL_TREE)
10992 gimple_omp_task_set_taskwait_p (g, true);
10993 gimplify_seq_add_stmt (pre_p, g);
10994 *expr_p = NULL_TREE;
10997 /* Gimplify the gross structure of an OMP_FOR statement. */
10999 static enum gimplify_status
11000 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11002 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11003 enum gimplify_status ret = GS_ALL_DONE;
11004 enum gimplify_status tret;
11005 gomp_for *gfor;
11006 gimple_seq for_body, for_pre_body;
11007 int i;
11008 bitmap has_decl_expr = NULL;
11009 enum omp_region_type ort = ORT_WORKSHARE;
11011 orig_for_stmt = for_stmt = *expr_p;
11013 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11014 != NULL_TREE);
11015 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11017 tree *data[4] = { NULL, NULL, NULL, NULL };
11018 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11019 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11020 find_combined_omp_for, data, NULL);
11021 if (inner_for_stmt == NULL_TREE)
11023 gcc_assert (seen_error ());
11024 *expr_p = NULL_TREE;
11025 return GS_ERROR;
11027 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11029 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11030 &OMP_FOR_PRE_BODY (for_stmt));
11031 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11033 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11035 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11036 &OMP_FOR_PRE_BODY (for_stmt));
11037 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11040 if (data[0])
11042 /* We have some statements or variable declarations in between
11043 the composite construct directives. Move them around the
11044 inner_for_stmt. */
11045 data[0] = expr_p;
11046 for (i = 0; i < 3; i++)
11047 if (data[i])
11049 tree t = *data[i];
11050 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11051 data[i + 1] = data[i];
11052 *data[i] = OMP_BODY (t);
11053 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11054 NULL_TREE, make_node (BLOCK));
11055 OMP_BODY (t) = body;
11056 append_to_statement_list_force (inner_for_stmt,
11057 &BIND_EXPR_BODY (body));
11058 *data[3] = t;
11059 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11060 gcc_assert (*data[3] == inner_for_stmt);
11062 return GS_OK;
11065 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11066 if (!loop_p
11067 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11068 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11069 i)) == TREE_LIST
11070 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11071 i)))
11073 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11074 /* Class iterators aren't allowed on OMP_SIMD, so the only
11075 case we need to solve is distribute parallel for. They are
11076 allowed on the loop construct, but that is already handled
11077 in gimplify_omp_loop. */
11078 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11079 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11080 && data[1]);
11081 tree orig_decl = TREE_PURPOSE (orig);
11082 tree last = TREE_VALUE (orig);
11083 tree *pc;
11084 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11085 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11086 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11087 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11088 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11089 break;
11090 if (*pc == NULL_TREE)
11092 tree *spc;
11093 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11094 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11095 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11096 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11097 break;
11098 if (*spc)
11100 tree c = *spc;
11101 *spc = OMP_CLAUSE_CHAIN (c);
11102 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11103 *pc = c;
11106 if (*pc == NULL_TREE)
11108 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11110 /* private clause will appear only on inner_for_stmt.
11111 Change it into firstprivate, and add private clause
11112 on for_stmt. */
11113 tree c = copy_node (*pc);
11114 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11115 OMP_FOR_CLAUSES (for_stmt) = c;
11116 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11117 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11119 else
11121 /* lastprivate clause will appear on both inner_for_stmt
11122 and for_stmt. Add firstprivate clause to
11123 inner_for_stmt. */
11124 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11125 OMP_CLAUSE_FIRSTPRIVATE);
11126 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11127 OMP_CLAUSE_CHAIN (c) = *pc;
11128 *pc = c;
11129 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11131 tree c = build_omp_clause (UNKNOWN_LOCATION,
11132 OMP_CLAUSE_FIRSTPRIVATE);
11133 OMP_CLAUSE_DECL (c) = last;
11134 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11135 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11136 c = build_omp_clause (UNKNOWN_LOCATION,
11137 *pc ? OMP_CLAUSE_SHARED
11138 : OMP_CLAUSE_FIRSTPRIVATE);
11139 OMP_CLAUSE_DECL (c) = orig_decl;
11140 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11141 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11143 /* Similarly, take care of C++ range for temporaries, those should
11144 be firstprivate on OMP_PARALLEL if any. */
11145 if (data[1])
11146 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11147 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11148 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11149 i)) == TREE_LIST
11150 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11151 i)))
11153 tree orig
11154 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11155 tree v = TREE_CHAIN (orig);
11156 tree c = build_omp_clause (UNKNOWN_LOCATION,
11157 OMP_CLAUSE_FIRSTPRIVATE);
11158 /* First add firstprivate clause for the __for_end artificial
11159 decl. */
11160 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11161 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11162 == REFERENCE_TYPE)
11163 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11164 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11165 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11166 if (TREE_VEC_ELT (v, 0))
11168 /* And now the same for __for_range artificial decl if it
11169 exists. */
11170 c = build_omp_clause (UNKNOWN_LOCATION,
11171 OMP_CLAUSE_FIRSTPRIVATE);
11172 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11173 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11174 == REFERENCE_TYPE)
11175 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11176 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11177 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11182 switch (TREE_CODE (for_stmt))
11184 case OMP_FOR:
11185 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
11187 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11188 OMP_CLAUSE_SCHEDULE))
11189 error_at (EXPR_LOCATION (for_stmt),
11190 "%qs clause may not appear on non-rectangular %qs",
11191 "schedule", "for");
11192 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
11193 error_at (EXPR_LOCATION (for_stmt),
11194 "%qs clause may not appear on non-rectangular %qs",
11195 "ordered", "for");
11197 break;
11198 case OMP_DISTRIBUTE:
11199 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
11200 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11201 OMP_CLAUSE_DIST_SCHEDULE))
11202 error_at (EXPR_LOCATION (for_stmt),
11203 "%qs clause may not appear on non-rectangular %qs",
11204 "dist_schedule", "distribute");
11205 break;
11206 case OACC_LOOP:
11207 ort = ORT_ACC;
11208 break;
11209 case OMP_TASKLOOP:
11210 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11211 ort = ORT_UNTIED_TASKLOOP;
11212 else
11213 ort = ORT_TASKLOOP;
11214 break;
11215 case OMP_SIMD:
11216 ort = ORT_SIMD;
11217 break;
11218 default:
11219 gcc_unreachable ();
11222 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11223 clause for the IV. */
11224 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11226 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11227 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11228 decl = TREE_OPERAND (t, 0);
11229 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11230 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11231 && OMP_CLAUSE_DECL (c) == decl)
11233 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11234 break;
11238 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11239 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11240 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11241 ? OMP_LOOP : TREE_CODE (for_stmt));
11243 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11244 gimplify_omp_ctxp->distribute = true;
11246 /* Handle OMP_FOR_INIT. */
11247 for_pre_body = NULL;
11248 if ((ort == ORT_SIMD
11249 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11250 && OMP_FOR_PRE_BODY (for_stmt))
11252 has_decl_expr = BITMAP_ALLOC (NULL);
11253 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11254 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11255 == VAR_DECL)
11257 t = OMP_FOR_PRE_BODY (for_stmt);
11258 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11260 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11262 tree_stmt_iterator si;
11263 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11264 tsi_next (&si))
11266 t = tsi_stmt (si);
11267 if (TREE_CODE (t) == DECL_EXPR
11268 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11269 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11273 if (OMP_FOR_PRE_BODY (for_stmt))
11275 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11276 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11277 else
11279 struct gimplify_omp_ctx ctx;
11280 memset (&ctx, 0, sizeof (ctx));
11281 ctx.region_type = ORT_NONE;
11282 gimplify_omp_ctxp = &ctx;
11283 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11284 gimplify_omp_ctxp = NULL;
11287 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11289 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11290 for_stmt = inner_for_stmt;
11292 /* For taskloop, need to gimplify the start, end and step before the
11293 taskloop, outside of the taskloop omp context. */
11294 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11296 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11298 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11299 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11301 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11302 TREE_OPERAND (t, 1)
11303 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11304 gimple_seq_empty_p (for_pre_body)
11305 ? pre_p : &for_pre_body, NULL,
11306 false);
11307 /* Reference to pointer conversion is considered useless,
11308 but is significant for firstprivate clause. Force it
11309 here. */
11310 if (TREE_CODE (type) == POINTER_TYPE
11311 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11312 == REFERENCE_TYPE))
11314 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11315 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11316 TREE_OPERAND (t, 1));
11317 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11318 ? pre_p : &for_pre_body);
11319 TREE_OPERAND (t, 1) = v;
11321 tree c = build_omp_clause (input_location,
11322 OMP_CLAUSE_FIRSTPRIVATE);
11323 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11324 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11325 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11328 /* Handle OMP_FOR_COND. */
11329 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11330 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11332 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11333 TREE_OPERAND (t, 1)
11334 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11335 gimple_seq_empty_p (for_pre_body)
11336 ? pre_p : &for_pre_body, NULL,
11337 false);
11338 /* Reference to pointer conversion is considered useless,
11339 but is significant for firstprivate clause. Force it
11340 here. */
11341 if (TREE_CODE (type) == POINTER_TYPE
11342 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11343 == REFERENCE_TYPE))
11345 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11346 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11347 TREE_OPERAND (t, 1));
11348 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11349 ? pre_p : &for_pre_body);
11350 TREE_OPERAND (t, 1) = v;
11352 tree c = build_omp_clause (input_location,
11353 OMP_CLAUSE_FIRSTPRIVATE);
11354 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11355 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11356 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11359 /* Handle OMP_FOR_INCR. */
11360 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11361 if (TREE_CODE (t) == MODIFY_EXPR)
11363 decl = TREE_OPERAND (t, 0);
11364 t = TREE_OPERAND (t, 1);
11365 tree *tp = &TREE_OPERAND (t, 1);
11366 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11367 tp = &TREE_OPERAND (t, 0);
11369 if (!is_gimple_constant (*tp))
11371 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
11372 ? pre_p : &for_pre_body;
11373 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
11374 tree c = build_omp_clause (input_location,
11375 OMP_CLAUSE_FIRSTPRIVATE);
11376 OMP_CLAUSE_DECL (c) = *tp;
11377 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11378 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11383 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11384 OMP_TASKLOOP);
11387 if (orig_for_stmt != for_stmt)
11388 gimplify_omp_ctxp->combined_loop = true;
11390 for_body = NULL;
11391 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11392 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11393 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11394 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11396 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11397 bool is_doacross = false;
11398 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11400 is_doacross = true;
11401 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11402 (OMP_FOR_INIT (for_stmt))
11403 * 2);
11405 int collapse = 1, tile = 0;
11406 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11407 if (c)
11408 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11409 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11410 if (c)
11411 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11412 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11414 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11415 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11416 decl = TREE_OPERAND (t, 0);
11417 gcc_assert (DECL_P (decl));
11418 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11419 || POINTER_TYPE_P (TREE_TYPE (decl)));
11420 if (is_doacross)
11422 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11424 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11425 if (TREE_CODE (orig_decl) == TREE_LIST)
11427 orig_decl = TREE_PURPOSE (orig_decl);
11428 if (!orig_decl)
11429 orig_decl = decl;
11431 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11433 else
11434 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11435 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11438 /* Make sure the iteration variable is private. */
11439 tree c = NULL_TREE;
11440 tree c2 = NULL_TREE;
11441 if (orig_for_stmt != for_stmt)
11443 /* Preserve this information until we gimplify the inner simd. */
11444 if (has_decl_expr
11445 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11446 TREE_PRIVATE (t) = 1;
11448 else if (ort == ORT_SIMD)
11450 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11451 (splay_tree_key) decl);
11452 omp_is_private (gimplify_omp_ctxp, decl,
11453 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11454 != 1));
11455 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11457 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11458 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11459 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11460 OMP_CLAUSE_LASTPRIVATE);
11461 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11462 OMP_CLAUSE_LASTPRIVATE))
11463 if (OMP_CLAUSE_DECL (c3) == decl)
11465 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11466 "conditional %<lastprivate%> on loop "
11467 "iterator %qD ignored", decl);
11468 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11469 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11472 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11474 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11475 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11476 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11477 if ((has_decl_expr
11478 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11479 || TREE_PRIVATE (t))
11481 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11482 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11484 struct gimplify_omp_ctx *outer
11485 = gimplify_omp_ctxp->outer_context;
11486 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11488 if (outer->region_type == ORT_WORKSHARE
11489 && outer->combined_loop)
11491 n = splay_tree_lookup (outer->variables,
11492 (splay_tree_key)decl);
11493 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11495 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11496 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11498 else
11500 struct gimplify_omp_ctx *octx = outer->outer_context;
11501 if (octx
11502 && octx->region_type == ORT_COMBINED_PARALLEL
11503 && octx->outer_context
11504 && (octx->outer_context->region_type
11505 == ORT_WORKSHARE)
11506 && octx->outer_context->combined_loop)
11508 octx = octx->outer_context;
11509 n = splay_tree_lookup (octx->variables,
11510 (splay_tree_key)decl);
11511 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11513 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11514 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11521 OMP_CLAUSE_DECL (c) = decl;
11522 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11523 OMP_FOR_CLAUSES (for_stmt) = c;
11524 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11525 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11527 if (outer->region_type == ORT_WORKSHARE
11528 && outer->combined_loop)
11530 if (outer->outer_context
11531 && (outer->outer_context->region_type
11532 == ORT_COMBINED_PARALLEL))
11533 outer = outer->outer_context;
11534 else if (omp_check_private (outer, decl, false))
11535 outer = NULL;
11537 else if (((outer->region_type & ORT_TASKLOOP)
11538 == ORT_TASKLOOP)
11539 && outer->combined_loop
11540 && !omp_check_private (gimplify_omp_ctxp,
11541 decl, false))
11543 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11545 omp_notice_variable (outer, decl, true);
11546 outer = NULL;
11548 if (outer)
11550 n = splay_tree_lookup (outer->variables,
11551 (splay_tree_key)decl);
11552 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11554 omp_add_variable (outer, decl,
11555 GOVD_LASTPRIVATE | GOVD_SEEN);
11556 if (outer->region_type == ORT_COMBINED_PARALLEL
11557 && outer->outer_context
11558 && (outer->outer_context->region_type
11559 == ORT_WORKSHARE)
11560 && outer->outer_context->combined_loop)
11562 outer = outer->outer_context;
11563 n = splay_tree_lookup (outer->variables,
11564 (splay_tree_key)decl);
11565 if (omp_check_private (outer, decl, false))
11566 outer = NULL;
11567 else if (n == NULL
11568 || ((n->value & GOVD_DATA_SHARE_CLASS)
11569 == 0))
11570 omp_add_variable (outer, decl,
11571 GOVD_LASTPRIVATE
11572 | GOVD_SEEN);
11573 else
11574 outer = NULL;
11576 if (outer && outer->outer_context
11577 && ((outer->outer_context->region_type
11578 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11579 || (((outer->region_type & ORT_TASKLOOP)
11580 == ORT_TASKLOOP)
11581 && (outer->outer_context->region_type
11582 == ORT_COMBINED_PARALLEL))))
11584 outer = outer->outer_context;
11585 n = splay_tree_lookup (outer->variables,
11586 (splay_tree_key)decl);
11587 if (n == NULL
11588 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11589 omp_add_variable (outer, decl,
11590 GOVD_SHARED | GOVD_SEEN);
11591 else
11592 outer = NULL;
11594 if (outer && outer->outer_context)
11595 omp_notice_variable (outer->outer_context, decl,
11596 true);
11601 else
11603 bool lastprivate
11604 = (!has_decl_expr
11605 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11606 if (TREE_PRIVATE (t))
11607 lastprivate = false;
11608 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11610 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11611 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11612 lastprivate = false;
11615 struct gimplify_omp_ctx *outer
11616 = gimplify_omp_ctxp->outer_context;
11617 if (outer && lastprivate)
11619 if (outer->region_type == ORT_WORKSHARE
11620 && outer->combined_loop)
11622 n = splay_tree_lookup (outer->variables,
11623 (splay_tree_key)decl);
11624 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11626 lastprivate = false;
11627 outer = NULL;
11629 else if (outer->outer_context
11630 && (outer->outer_context->region_type
11631 == ORT_COMBINED_PARALLEL))
11632 outer = outer->outer_context;
11633 else if (omp_check_private (outer, decl, false))
11634 outer = NULL;
11636 else if (((outer->region_type & ORT_TASKLOOP)
11637 == ORT_TASKLOOP)
11638 && outer->combined_loop
11639 && !omp_check_private (gimplify_omp_ctxp,
11640 decl, false))
11642 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11644 omp_notice_variable (outer, decl, true);
11645 outer = NULL;
11647 if (outer)
11649 n = splay_tree_lookup (outer->variables,
11650 (splay_tree_key)decl);
11651 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11653 omp_add_variable (outer, decl,
11654 GOVD_LASTPRIVATE | GOVD_SEEN);
11655 if (outer->region_type == ORT_COMBINED_PARALLEL
11656 && outer->outer_context
11657 && (outer->outer_context->region_type
11658 == ORT_WORKSHARE)
11659 && outer->outer_context->combined_loop)
11661 outer = outer->outer_context;
11662 n = splay_tree_lookup (outer->variables,
11663 (splay_tree_key)decl);
11664 if (omp_check_private (outer, decl, false))
11665 outer = NULL;
11666 else if (n == NULL
11667 || ((n->value & GOVD_DATA_SHARE_CLASS)
11668 == 0))
11669 omp_add_variable (outer, decl,
11670 GOVD_LASTPRIVATE
11671 | GOVD_SEEN);
11672 else
11673 outer = NULL;
11675 if (outer && outer->outer_context
11676 && ((outer->outer_context->region_type
11677 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11678 || (((outer->region_type & ORT_TASKLOOP)
11679 == ORT_TASKLOOP)
11680 && (outer->outer_context->region_type
11681 == ORT_COMBINED_PARALLEL))))
11683 outer = outer->outer_context;
11684 n = splay_tree_lookup (outer->variables,
11685 (splay_tree_key)decl);
11686 if (n == NULL
11687 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11688 omp_add_variable (outer, decl,
11689 GOVD_SHARED | GOVD_SEEN);
11690 else
11691 outer = NULL;
11693 if (outer && outer->outer_context)
11694 omp_notice_variable (outer->outer_context, decl,
11695 true);
11700 c = build_omp_clause (input_location,
11701 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11702 : OMP_CLAUSE_PRIVATE);
11703 OMP_CLAUSE_DECL (c) = decl;
11704 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11705 OMP_FOR_CLAUSES (for_stmt) = c;
11706 omp_add_variable (gimplify_omp_ctxp, decl,
11707 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11708 | GOVD_EXPLICIT | GOVD_SEEN);
11709 c = NULL_TREE;
11712 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11714 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11715 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11716 (splay_tree_key) decl);
11717 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11718 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11719 OMP_CLAUSE_LASTPRIVATE);
11720 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11721 OMP_CLAUSE_LASTPRIVATE))
11722 if (OMP_CLAUSE_DECL (c3) == decl)
11724 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11725 "conditional %<lastprivate%> on loop "
11726 "iterator %qD ignored", decl);
11727 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11728 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11731 else
11732 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11734 /* If DECL is not a gimple register, create a temporary variable to act
11735 as an iteration counter. This is valid, since DECL cannot be
11736 modified in the body of the loop. Similarly for any iteration vars
11737 in simd with collapse > 1 where the iterator vars must be
11738 lastprivate. */
11739 if (orig_for_stmt != for_stmt)
11740 var = decl;
11741 else if (!is_gimple_reg (decl)
11742 || (ort == ORT_SIMD
11743 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11745 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11746 /* Make sure omp_add_variable is not called on it prematurely.
11747 We call it ourselves a few lines later. */
11748 gimplify_omp_ctxp = NULL;
11749 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11750 gimplify_omp_ctxp = ctx;
11751 TREE_OPERAND (t, 0) = var;
11753 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11755 if (ort == ORT_SIMD
11756 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11758 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11759 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11760 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11761 OMP_CLAUSE_DECL (c2) = var;
11762 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11763 OMP_FOR_CLAUSES (for_stmt) = c2;
11764 omp_add_variable (gimplify_omp_ctxp, var,
11765 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11766 if (c == NULL_TREE)
11768 c = c2;
11769 c2 = NULL_TREE;
11772 else
11773 omp_add_variable (gimplify_omp_ctxp, var,
11774 GOVD_PRIVATE | GOVD_SEEN);
11776 else
11777 var = decl;
11779 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11781 tree lb = TREE_OPERAND (t, 1);
11782 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
11783 is_gimple_val, fb_rvalue, false);
11784 ret = MIN (ret, tret);
11785 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
11786 is_gimple_val, fb_rvalue, false);
11788 else
11789 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11790 is_gimple_val, fb_rvalue, false);
11791 ret = MIN (ret, tret);
11792 if (ret == GS_ERROR)
11793 return ret;
11795 /* Handle OMP_FOR_COND. */
11796 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11797 gcc_assert (COMPARISON_CLASS_P (t));
11798 gcc_assert (TREE_OPERAND (t, 0) == decl);
11800 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11802 tree ub = TREE_OPERAND (t, 1);
11803 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
11804 is_gimple_val, fb_rvalue, false);
11805 ret = MIN (ret, tret);
11806 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
11807 is_gimple_val, fb_rvalue, false);
11809 else
11810 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11811 is_gimple_val, fb_rvalue, false);
11812 ret = MIN (ret, tret);
11814 /* Handle OMP_FOR_INCR. */
11815 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11816 switch (TREE_CODE (t))
11818 case PREINCREMENT_EXPR:
11819 case POSTINCREMENT_EXPR:
11821 tree decl = TREE_OPERAND (t, 0);
11822 /* c_omp_for_incr_canonicalize_ptr() should have been
11823 called to massage things appropriately. */
11824 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11826 if (orig_for_stmt != for_stmt)
11827 break;
11828 t = build_int_cst (TREE_TYPE (decl), 1);
11829 if (c)
11830 OMP_CLAUSE_LINEAR_STEP (c) = t;
11831 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11832 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11833 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11834 break;
11837 case PREDECREMENT_EXPR:
11838 case POSTDECREMENT_EXPR:
11839 /* c_omp_for_incr_canonicalize_ptr() should have been
11840 called to massage things appropriately. */
11841 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11842 if (orig_for_stmt != for_stmt)
11843 break;
11844 t = build_int_cst (TREE_TYPE (decl), -1);
11845 if (c)
11846 OMP_CLAUSE_LINEAR_STEP (c) = t;
11847 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11848 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11849 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11850 break;
11852 case MODIFY_EXPR:
11853 gcc_assert (TREE_OPERAND (t, 0) == decl);
11854 TREE_OPERAND (t, 0) = var;
11856 t = TREE_OPERAND (t, 1);
11857 switch (TREE_CODE (t))
11859 case PLUS_EXPR:
11860 if (TREE_OPERAND (t, 1) == decl)
11862 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11863 TREE_OPERAND (t, 0) = var;
11864 break;
11867 /* Fallthru. */
11868 case MINUS_EXPR:
11869 case POINTER_PLUS_EXPR:
11870 gcc_assert (TREE_OPERAND (t, 0) == decl);
11871 TREE_OPERAND (t, 0) = var;
11872 break;
11873 default:
11874 gcc_unreachable ();
11877 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11878 is_gimple_val, fb_rvalue, false);
11879 ret = MIN (ret, tret);
11880 if (c)
11882 tree step = TREE_OPERAND (t, 1);
11883 tree stept = TREE_TYPE (decl);
11884 if (POINTER_TYPE_P (stept))
11885 stept = sizetype;
11886 step = fold_convert (stept, step);
11887 if (TREE_CODE (t) == MINUS_EXPR)
11888 step = fold_build1 (NEGATE_EXPR, stept, step);
11889 OMP_CLAUSE_LINEAR_STEP (c) = step;
11890 if (step != TREE_OPERAND (t, 1))
11892 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11893 &for_pre_body, NULL,
11894 is_gimple_val, fb_rvalue, false);
11895 ret = MIN (ret, tret);
11898 break;
11900 default:
11901 gcc_unreachable ();
11904 if (c2)
11906 gcc_assert (c);
11907 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11910 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11912 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11913 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11914 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11915 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11916 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11917 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11918 && OMP_CLAUSE_DECL (c) == decl)
11920 if (is_doacross && (collapse == 1 || i >= collapse))
11921 t = var;
11922 else
11924 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11925 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11926 gcc_assert (TREE_OPERAND (t, 0) == var);
11927 t = TREE_OPERAND (t, 1);
11928 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11929 || TREE_CODE (t) == MINUS_EXPR
11930 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11931 gcc_assert (TREE_OPERAND (t, 0) == var);
11932 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11933 is_doacross ? var : decl,
11934 TREE_OPERAND (t, 1));
11936 gimple_seq *seq;
11937 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11938 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11939 else
11940 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11941 push_gimplify_context ();
11942 gimplify_assign (decl, t, seq);
11943 gimple *bind = NULL;
11944 if (gimplify_ctxp->temps)
11946 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11947 *seq = NULL;
11948 gimplify_seq_add_stmt (seq, bind);
11950 pop_gimplify_context (bind);
11953 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
11954 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
11956 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
11957 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11958 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
11959 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
11960 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
11961 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
11962 gcc_assert (COMPARISON_CLASS_P (t));
11963 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
11964 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
11965 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
11969 BITMAP_FREE (has_decl_expr);
11971 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11972 || (loop_p && orig_for_stmt == for_stmt))
11974 push_gimplify_context ();
11975 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11977 OMP_FOR_BODY (orig_for_stmt)
11978 = build3 (BIND_EXPR, void_type_node, NULL,
11979 OMP_FOR_BODY (orig_for_stmt), NULL);
11980 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11984 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11985 &for_body);
11987 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11988 || (loop_p && orig_for_stmt == for_stmt))
11990 if (gimple_code (g) == GIMPLE_BIND)
11991 pop_gimplify_context (g);
11992 else
11993 pop_gimplify_context (NULL);
11996 if (orig_for_stmt != for_stmt)
11997 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11999 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12000 decl = TREE_OPERAND (t, 0);
12001 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12002 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12003 gimplify_omp_ctxp = ctx->outer_context;
12004 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12005 gimplify_omp_ctxp = ctx;
12006 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
12007 TREE_OPERAND (t, 0) = var;
12008 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12009 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12010 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
12011 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12012 for (int j = i + 1;
12013 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12015 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12016 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12017 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12018 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12020 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12021 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12023 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12024 gcc_assert (COMPARISON_CLASS_P (t));
12025 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12026 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12028 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12029 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12034 gimplify_adjust_omp_clauses (pre_p, for_body,
12035 &OMP_FOR_CLAUSES (orig_for_stmt),
12036 TREE_CODE (orig_for_stmt));
12038 int kind;
12039 switch (TREE_CODE (orig_for_stmt))
12041 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
12042 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
12043 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
12044 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
12045 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
12046 default:
12047 gcc_unreachable ();
12049 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
12051 gimplify_seq_add_seq (pre_p, for_pre_body);
12052 for_pre_body = NULL;
12054 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12055 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12056 for_pre_body);
12057 if (orig_for_stmt != for_stmt)
12058 gimple_omp_for_set_combined_p (gfor, true);
12059 if (gimplify_omp_ctxp
12060 && (gimplify_omp_ctxp->combined_loop
12061 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12062 && gimplify_omp_ctxp->outer_context
12063 && gimplify_omp_ctxp->outer_context->combined_loop)))
12065 gimple_omp_for_set_combined_into_p (gfor, true);
12066 if (gimplify_omp_ctxp->combined_loop)
12067 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12068 else
12069 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12072 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12074 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12075 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12076 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12077 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12078 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12079 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12080 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12081 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12084 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12085 constructs with GIMPLE_OMP_TASK sandwiched in between them.
12086 The outer taskloop stands for computing the number of iterations,
12087 counts for collapsed loops and holding taskloop specific clauses.
12088 The task construct stands for the effect of data sharing on the
12089 explicit task it creates and the inner taskloop stands for expansion
12090 of the static loop inside of the explicit task construct. */
12091 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12093 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12094 tree task_clauses = NULL_TREE;
12095 tree c = *gfor_clauses_ptr;
12096 tree *gtask_clauses_ptr = &task_clauses;
12097 tree outer_for_clauses = NULL_TREE;
12098 tree *gforo_clauses_ptr = &outer_for_clauses;
12099 for (; c; c = OMP_CLAUSE_CHAIN (c))
12100 switch (OMP_CLAUSE_CODE (c))
12102 /* These clauses are allowed on task, move them there. */
12103 case OMP_CLAUSE_SHARED:
12104 case OMP_CLAUSE_FIRSTPRIVATE:
12105 case OMP_CLAUSE_DEFAULT:
12106 case OMP_CLAUSE_IF:
12107 case OMP_CLAUSE_UNTIED:
12108 case OMP_CLAUSE_FINAL:
12109 case OMP_CLAUSE_MERGEABLE:
12110 case OMP_CLAUSE_PRIORITY:
12111 case OMP_CLAUSE_REDUCTION:
12112 case OMP_CLAUSE_IN_REDUCTION:
12113 *gtask_clauses_ptr = c;
12114 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12115 break;
12116 case OMP_CLAUSE_PRIVATE:
12117 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12119 /* We want private on outer for and firstprivate
12120 on task. */
12121 *gtask_clauses_ptr
12122 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12123 OMP_CLAUSE_FIRSTPRIVATE);
12124 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12125 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12126 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12127 *gforo_clauses_ptr = c;
12128 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12130 else
12132 *gtask_clauses_ptr = c;
12133 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12135 break;
12136 /* These clauses go into outer taskloop clauses. */
12137 case OMP_CLAUSE_GRAINSIZE:
12138 case OMP_CLAUSE_NUM_TASKS:
12139 case OMP_CLAUSE_NOGROUP:
12140 *gforo_clauses_ptr = c;
12141 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12142 break;
12143 /* Taskloop clause we duplicate on both taskloops. */
12144 case OMP_CLAUSE_COLLAPSE:
12145 *gfor_clauses_ptr = c;
12146 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12147 *gforo_clauses_ptr = copy_node (c);
12148 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12149 break;
12150 /* For lastprivate, keep the clause on inner taskloop, and add
12151 a shared clause on task. If the same decl is also firstprivate,
12152 add also firstprivate clause on the inner taskloop. */
12153 case OMP_CLAUSE_LASTPRIVATE:
12154 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12156 /* For taskloop C++ lastprivate IVs, we want:
12157 1) private on outer taskloop
12158 2) firstprivate and shared on task
12159 3) lastprivate on inner taskloop */
12160 *gtask_clauses_ptr
12161 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12162 OMP_CLAUSE_FIRSTPRIVATE);
12163 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12164 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12165 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12166 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12167 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12168 OMP_CLAUSE_PRIVATE);
12169 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12170 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12171 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12172 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12174 *gfor_clauses_ptr = c;
12175 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12176 *gtask_clauses_ptr
12177 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12178 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12179 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12180 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12181 gtask_clauses_ptr
12182 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12183 break;
12184 default:
12185 gcc_unreachable ();
12187 *gfor_clauses_ptr = NULL_TREE;
12188 *gtask_clauses_ptr = NULL_TREE;
12189 *gforo_clauses_ptr = NULL_TREE;
12190 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12191 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12192 NULL_TREE, NULL_TREE, NULL_TREE);
12193 gimple_omp_task_set_taskloop_p (g, true);
12194 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12195 gomp_for *gforo
12196 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12197 gimple_omp_for_collapse (gfor),
12198 gimple_omp_for_pre_body (gfor));
12199 gimple_omp_for_set_pre_body (gfor, NULL);
12200 gimple_omp_for_set_combined_p (gforo, true);
12201 gimple_omp_for_set_combined_into_p (gfor, true);
12202 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12204 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12205 tree v = create_tmp_var (type);
12206 gimple_omp_for_set_index (gforo, i, v);
12207 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12208 gimple_omp_for_set_initial (gforo, i, t);
12209 gimple_omp_for_set_cond (gforo, i,
12210 gimple_omp_for_cond (gfor, i));
12211 t = unshare_expr (gimple_omp_for_final (gfor, i));
12212 gimple_omp_for_set_final (gforo, i, t);
12213 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12214 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12215 TREE_OPERAND (t, 0) = v;
12216 gimple_omp_for_set_incr (gforo, i, t);
12217 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12218 OMP_CLAUSE_DECL (t) = v;
12219 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12220 gimple_omp_for_set_clauses (gforo, t);
12222 gimplify_seq_add_stmt (pre_p, gforo);
12224 else
12225 gimplify_seq_add_stmt (pre_p, gfor);
12227 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12229 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12230 unsigned lastprivate_conditional = 0;
12231 while (ctx
12232 && (ctx->region_type == ORT_TARGET_DATA
12233 || ctx->region_type == ORT_TASKGROUP))
12234 ctx = ctx->outer_context;
12235 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12236 for (tree c = gimple_omp_for_clauses (gfor);
12237 c; c = OMP_CLAUSE_CHAIN (c))
12238 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12239 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12240 ++lastprivate_conditional;
12241 if (lastprivate_conditional)
12243 struct omp_for_data fd;
12244 omp_extract_for_data (gfor, &fd, NULL);
12245 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12246 lastprivate_conditional);
12247 tree var = create_tmp_var_raw (type);
12248 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12249 OMP_CLAUSE_DECL (c) = var;
12250 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12251 gimple_omp_for_set_clauses (gfor, c);
12252 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12255 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12257 unsigned lastprivate_conditional = 0;
12258 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12259 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12260 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12261 ++lastprivate_conditional;
12262 if (lastprivate_conditional)
12264 struct omp_for_data fd;
12265 omp_extract_for_data (gfor, &fd, NULL);
12266 tree type = unsigned_type_for (fd.iter_type);
12267 while (lastprivate_conditional--)
12269 tree c = build_omp_clause (UNKNOWN_LOCATION,
12270 OMP_CLAUSE__CONDTEMP_);
12271 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12272 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12273 gimple_omp_for_set_clauses (gfor, c);
12278 if (ret != GS_ALL_DONE)
12279 return GS_ERROR;
12280 *expr_p = NULL_TREE;
12281 return GS_ALL_DONE;
12284 /* Helper for gimplify_omp_loop, called through walk_tree. */
12286 static tree
12287 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12289 if (DECL_P (*tp))
12291 tree *d = (tree *) data;
12292 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12294 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12295 *walk_subtrees = 0;
12297 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12299 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12300 *walk_subtrees = 0;
12303 return NULL_TREE;
12306 /* Gimplify the gross structure of an OMP_LOOP statement. */
12308 static enum gimplify_status
12309 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12311 tree for_stmt = *expr_p;
12312 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12313 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12314 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12315 int i;
12317 /* If order is not present, the behavior is as if order(concurrent)
12318 appeared. */
12319 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12320 if (order == NULL_TREE)
12322 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12323 OMP_CLAUSE_CHAIN (order) = clauses;
12324 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12327 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12328 if (bind == NULL_TREE)
12330 if (!flag_openmp) /* flag_openmp_simd */
12332 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12333 kind = OMP_CLAUSE_BIND_TEAMS;
12334 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12335 kind = OMP_CLAUSE_BIND_PARALLEL;
12336 else
12338 for (; octx; octx = octx->outer_context)
12340 if ((octx->region_type & ORT_ACC) != 0
12341 || octx->region_type == ORT_NONE
12342 || octx->region_type == ORT_IMPLICIT_TARGET)
12343 continue;
12344 break;
12346 if (octx == NULL && !in_omp_construct)
12347 error_at (EXPR_LOCATION (for_stmt),
12348 "%<bind%> clause not specified on a %<loop%> "
12349 "construct not nested inside another OpenMP construct");
12351 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12352 OMP_CLAUSE_CHAIN (bind) = clauses;
12353 OMP_CLAUSE_BIND_KIND (bind) = kind;
12354 OMP_FOR_CLAUSES (for_stmt) = bind;
12356 else
12357 switch (OMP_CLAUSE_BIND_KIND (bind))
12359 case OMP_CLAUSE_BIND_THREAD:
12360 break;
12361 case OMP_CLAUSE_BIND_PARALLEL:
12362 if (!flag_openmp) /* flag_openmp_simd */
12364 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12365 break;
12367 for (; octx; octx = octx->outer_context)
12368 if (octx->region_type == ORT_SIMD
12369 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12371 error_at (EXPR_LOCATION (for_stmt),
12372 "%<bind(parallel)%> on a %<loop%> construct nested "
12373 "inside %<simd%> construct");
12374 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12375 break;
12377 kind = OMP_CLAUSE_BIND_PARALLEL;
12378 break;
12379 case OMP_CLAUSE_BIND_TEAMS:
12380 if (!flag_openmp) /* flag_openmp_simd */
12382 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12383 break;
12385 if ((octx
12386 && octx->region_type != ORT_IMPLICIT_TARGET
12387 && octx->region_type != ORT_NONE
12388 && (octx->region_type & ORT_TEAMS) == 0)
12389 || in_omp_construct)
12391 error_at (EXPR_LOCATION (for_stmt),
12392 "%<bind(teams)%> on a %<loop%> region not strictly "
12393 "nested inside of a %<teams%> region");
12394 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12395 break;
12397 kind = OMP_CLAUSE_BIND_TEAMS;
12398 break;
12399 default:
12400 gcc_unreachable ();
12403 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12404 switch (OMP_CLAUSE_CODE (*pc))
12406 case OMP_CLAUSE_REDUCTION:
12407 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12409 error_at (OMP_CLAUSE_LOCATION (*pc),
12410 "%<inscan%> %<reduction%> clause on "
12411 "%qs construct", "loop");
12412 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12414 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12416 error_at (OMP_CLAUSE_LOCATION (*pc),
12417 "invalid %<task%> reduction modifier on construct "
12418 "other than %<parallel%>, %<for%> or %<sections%>");
12419 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12421 pc = &OMP_CLAUSE_CHAIN (*pc);
12422 break;
12423 case OMP_CLAUSE_LASTPRIVATE:
12424 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12426 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12427 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12428 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12429 break;
12430 if (OMP_FOR_ORIG_DECLS (for_stmt)
12431 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12432 i)) == TREE_LIST
12433 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12434 i)))
12436 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12437 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12438 break;
12441 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12443 error_at (OMP_CLAUSE_LOCATION (*pc),
12444 "%<lastprivate%> clause on a %<loop%> construct refers "
12445 "to a variable %qD which is not the loop iterator",
12446 OMP_CLAUSE_DECL (*pc));
12447 *pc = OMP_CLAUSE_CHAIN (*pc);
12448 break;
12450 pc = &OMP_CLAUSE_CHAIN (*pc);
12451 break;
12452 default:
12453 pc = &OMP_CLAUSE_CHAIN (*pc);
12454 break;
12457 TREE_SET_CODE (for_stmt, OMP_SIMD);
12459 int last;
12460 switch (kind)
12462 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12463 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12464 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12466 for (int pass = 1; pass <= last; pass++)
12468 if (pass == 2)
12470 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12471 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12472 *expr_p = make_node (OMP_PARALLEL);
12473 TREE_TYPE (*expr_p) = void_type_node;
12474 OMP_PARALLEL_BODY (*expr_p) = bind;
12475 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12476 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12477 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12478 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12479 if (OMP_FOR_ORIG_DECLS (for_stmt)
12480 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12481 == TREE_LIST))
12483 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12484 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12486 *pc = build_omp_clause (UNKNOWN_LOCATION,
12487 OMP_CLAUSE_FIRSTPRIVATE);
12488 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12489 pc = &OMP_CLAUSE_CHAIN (*pc);
12493 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12494 tree *pc = &OMP_FOR_CLAUSES (t);
12495 TREE_TYPE (t) = void_type_node;
12496 OMP_FOR_BODY (t) = *expr_p;
12497 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12498 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12499 switch (OMP_CLAUSE_CODE (c))
12501 case OMP_CLAUSE_BIND:
12502 case OMP_CLAUSE_ORDER:
12503 case OMP_CLAUSE_COLLAPSE:
12504 *pc = copy_node (c);
12505 pc = &OMP_CLAUSE_CHAIN (*pc);
12506 break;
12507 case OMP_CLAUSE_PRIVATE:
12508 case OMP_CLAUSE_FIRSTPRIVATE:
12509 /* Only needed on innermost. */
12510 break;
12511 case OMP_CLAUSE_LASTPRIVATE:
12512 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12514 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12515 OMP_CLAUSE_FIRSTPRIVATE);
12516 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12517 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12518 pc = &OMP_CLAUSE_CHAIN (*pc);
12520 *pc = copy_node (c);
12521 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12522 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12523 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12525 if (pass != last)
12526 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12527 else
12528 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12529 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12531 pc = &OMP_CLAUSE_CHAIN (*pc);
12532 break;
12533 case OMP_CLAUSE_REDUCTION:
12534 *pc = copy_node (c);
12535 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12536 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12537 OMP_CLAUSE_REDUCTION_INIT (*pc)
12538 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12539 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12540 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12541 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12543 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12544 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12545 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12546 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12547 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12548 tree nc = *pc;
12549 tree data[2] = { c, nc };
12550 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12551 replace_reduction_placeholders,
12552 data);
12553 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12554 replace_reduction_placeholders,
12555 data);
12557 pc = &OMP_CLAUSE_CHAIN (*pc);
12558 break;
12559 default:
12560 gcc_unreachable ();
12562 *pc = NULL_TREE;
12563 *expr_p = t;
12565 return gimplify_omp_for (expr_p, pre_p);
12569 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12570 of OMP_TARGET's body. */
12572 static tree
12573 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12575 *walk_subtrees = 0;
12576 switch (TREE_CODE (*tp))
12578 case OMP_TEAMS:
12579 return *tp;
12580 case BIND_EXPR:
12581 case STATEMENT_LIST:
12582 *walk_subtrees = 1;
12583 break;
12584 default:
12585 break;
12587 return NULL_TREE;
12590 /* Helper function of optimize_target_teams, determine if the expression
12591 can be computed safely before the target construct on the host. */
12593 static tree
12594 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12596 splay_tree_node n;
12598 if (TYPE_P (*tp))
12600 *walk_subtrees = 0;
12601 return NULL_TREE;
12603 switch (TREE_CODE (*tp))
12605 case VAR_DECL:
12606 case PARM_DECL:
12607 case RESULT_DECL:
12608 *walk_subtrees = 0;
12609 if (error_operand_p (*tp)
12610 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12611 || DECL_HAS_VALUE_EXPR_P (*tp)
12612 || DECL_THREAD_LOCAL_P (*tp)
12613 || TREE_SIDE_EFFECTS (*tp)
12614 || TREE_THIS_VOLATILE (*tp))
12615 return *tp;
12616 if (is_global_var (*tp)
12617 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12618 || lookup_attribute ("omp declare target link",
12619 DECL_ATTRIBUTES (*tp))))
12620 return *tp;
12621 if (VAR_P (*tp)
12622 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12623 && !is_global_var (*tp)
12624 && decl_function_context (*tp) == current_function_decl)
12625 return *tp;
12626 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12627 (splay_tree_key) *tp);
12628 if (n == NULL)
12630 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
12631 return NULL_TREE;
12632 return *tp;
12634 else if (n->value & GOVD_LOCAL)
12635 return *tp;
12636 else if (n->value & GOVD_FIRSTPRIVATE)
12637 return NULL_TREE;
12638 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12639 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12640 return NULL_TREE;
12641 return *tp;
12642 case INTEGER_CST:
12643 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12644 return *tp;
12645 return NULL_TREE;
12646 case TARGET_EXPR:
12647 if (TARGET_EXPR_INITIAL (*tp)
12648 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
12649 return *tp;
12650 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
12651 walk_subtrees, NULL);
12652 /* Allow some reasonable subset of integral arithmetics. */
12653 case PLUS_EXPR:
12654 case MINUS_EXPR:
12655 case MULT_EXPR:
12656 case TRUNC_DIV_EXPR:
12657 case CEIL_DIV_EXPR:
12658 case FLOOR_DIV_EXPR:
12659 case ROUND_DIV_EXPR:
12660 case TRUNC_MOD_EXPR:
12661 case CEIL_MOD_EXPR:
12662 case FLOOR_MOD_EXPR:
12663 case ROUND_MOD_EXPR:
12664 case RDIV_EXPR:
12665 case EXACT_DIV_EXPR:
12666 case MIN_EXPR:
12667 case MAX_EXPR:
12668 case LSHIFT_EXPR:
12669 case RSHIFT_EXPR:
12670 case BIT_IOR_EXPR:
12671 case BIT_XOR_EXPR:
12672 case BIT_AND_EXPR:
12673 case NEGATE_EXPR:
12674 case ABS_EXPR:
12675 case BIT_NOT_EXPR:
12676 case NON_LVALUE_EXPR:
12677 CASE_CONVERT:
12678 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12679 return *tp;
12680 return NULL_TREE;
12681 /* And disallow anything else, except for comparisons. */
12682 default:
12683 if (COMPARISON_CLASS_P (*tp))
12684 return NULL_TREE;
12685 return *tp;
12689 /* Try to determine if the num_teams and/or thread_limit expressions
12690 can have their values determined already before entering the
12691 target construct.
12692 INTEGER_CSTs trivially are,
12693 integral decls that are firstprivate (explicitly or implicitly)
12694 or explicitly map(always, to:) or map(always, tofrom:) on the target
12695 region too, and expressions involving simple arithmetics on those
12696 too, function calls are not ok, dereferencing something neither etc.
12697 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12698 EXPR based on what we find:
12699 0 stands for clause not specified at all, use implementation default
12700 -1 stands for value that can't be determined easily before entering
12701 the target construct.
12702 If teams construct is not present at all, use 1 for num_teams
12703 and 0 for thread_limit (only one team is involved, and the thread
12704 limit is implementation defined. */
12706 static void
12707 optimize_target_teams (tree target, gimple_seq *pre_p)
12709 tree body = OMP_BODY (target);
12710 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
12711 tree num_teams = integer_zero_node;
12712 tree thread_limit = integer_zero_node;
12713 location_t num_teams_loc = EXPR_LOCATION (target);
12714 location_t thread_limit_loc = EXPR_LOCATION (target);
12715 tree c, *p, expr;
12716 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
12718 if (teams == NULL_TREE)
12719 num_teams = integer_one_node;
12720 else
12721 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
12723 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
12725 p = &num_teams;
12726 num_teams_loc = OMP_CLAUSE_LOCATION (c);
12728 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
12730 p = &thread_limit;
12731 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
12733 else
12734 continue;
12735 expr = OMP_CLAUSE_OPERAND (c, 0);
12736 if (TREE_CODE (expr) == INTEGER_CST)
12738 *p = expr;
12739 continue;
12741 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
12743 *p = integer_minus_one_node;
12744 continue;
12746 *p = expr;
12747 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
12748 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
12749 == GS_ERROR)
12751 gimplify_omp_ctxp = target_ctx;
12752 *p = integer_minus_one_node;
12753 continue;
12755 gimplify_omp_ctxp = target_ctx;
12756 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
12757 OMP_CLAUSE_OPERAND (c, 0) = *p;
12759 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
12760 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
12761 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12762 OMP_TARGET_CLAUSES (target) = c;
12763 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
12764 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
12765 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12766 OMP_TARGET_CLAUSES (target) = c;
12769 /* Gimplify the gross structure of several OMP constructs. */
12771 static void
12772 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
12774 tree expr = *expr_p;
12775 gimple *stmt;
12776 gimple_seq body = NULL;
12777 enum omp_region_type ort;
12779 switch (TREE_CODE (expr))
12781 case OMP_SECTIONS:
12782 case OMP_SINGLE:
12783 ort = ORT_WORKSHARE;
12784 break;
12785 case OMP_TARGET:
12786 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
12787 break;
12788 case OACC_KERNELS:
12789 ort = ORT_ACC_KERNELS;
12790 break;
12791 case OACC_PARALLEL:
12792 ort = ORT_ACC_PARALLEL;
12793 break;
12794 case OACC_SERIAL:
12795 ort = ORT_ACC_SERIAL;
12796 break;
12797 case OACC_DATA:
12798 ort = ORT_ACC_DATA;
12799 break;
12800 case OMP_TARGET_DATA:
12801 ort = ORT_TARGET_DATA;
12802 break;
12803 case OMP_TEAMS:
12804 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
12805 if (gimplify_omp_ctxp == NULL
12806 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
12807 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
12808 break;
12809 case OACC_HOST_DATA:
12810 ort = ORT_ACC_HOST_DATA;
12811 break;
12812 default:
12813 gcc_unreachable ();
12816 bool save_in_omp_construct = in_omp_construct;
12817 if ((ort & ORT_ACC) == 0)
12818 in_omp_construct = false;
12819 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
12820 TREE_CODE (expr));
12821 if (TREE_CODE (expr) == OMP_TARGET)
12822 optimize_target_teams (expr, pre_p);
12823 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
12824 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12826 push_gimplify_context ();
12827 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
12828 if (gimple_code (g) == GIMPLE_BIND)
12829 pop_gimplify_context (g);
12830 else
12831 pop_gimplify_context (NULL);
12832 if ((ort & ORT_TARGET_DATA) != 0)
12834 enum built_in_function end_ix;
12835 switch (TREE_CODE (expr))
12837 case OACC_DATA:
12838 case OACC_HOST_DATA:
12839 end_ix = BUILT_IN_GOACC_DATA_END;
12840 break;
12841 case OMP_TARGET_DATA:
12842 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
12843 break;
12844 default:
12845 gcc_unreachable ();
12847 tree fn = builtin_decl_explicit (end_ix);
12848 g = gimple_build_call (fn, 0);
12849 gimple_seq cleanup = NULL;
12850 gimple_seq_add_stmt (&cleanup, g);
12851 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12852 body = NULL;
12853 gimple_seq_add_stmt (&body, g);
12856 else
12857 gimplify_and_add (OMP_BODY (expr), &body);
12858 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
12859 TREE_CODE (expr));
12860 in_omp_construct = save_in_omp_construct;
12862 switch (TREE_CODE (expr))
12864 case OACC_DATA:
12865 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
12866 OMP_CLAUSES (expr));
12867 break;
12868 case OACC_HOST_DATA:
12869 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
12871 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12872 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
12873 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
12876 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
12877 OMP_CLAUSES (expr));
12878 break;
12879 case OACC_KERNELS:
12880 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
12881 OMP_CLAUSES (expr));
12882 break;
12883 case OACC_PARALLEL:
12884 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
12885 OMP_CLAUSES (expr));
12886 break;
12887 case OACC_SERIAL:
12888 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
12889 OMP_CLAUSES (expr));
12890 break;
12891 case OMP_SECTIONS:
12892 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
12893 break;
12894 case OMP_SINGLE:
12895 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
12896 break;
12897 case OMP_TARGET:
12898 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
12899 OMP_CLAUSES (expr));
12900 break;
12901 case OMP_TARGET_DATA:
12902 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
12903 to be evaluated before the use_device_{ptr,addr} clauses if they
12904 refer to the same variables. */
12906 tree use_device_clauses;
12907 tree *pc, *uc = &use_device_clauses;
12908 for (pc = &OMP_CLAUSES (expr); *pc; )
12909 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
12910 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
12912 *uc = *pc;
12913 *pc = OMP_CLAUSE_CHAIN (*pc);
12914 uc = &OMP_CLAUSE_CHAIN (*uc);
12916 else
12917 pc = &OMP_CLAUSE_CHAIN (*pc);
12918 *uc = NULL_TREE;
12919 *pc = use_device_clauses;
12920 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
12921 OMP_CLAUSES (expr));
12923 break;
12924 case OMP_TEAMS:
12925 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
12926 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12927 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
12928 break;
12929 default:
12930 gcc_unreachable ();
12933 gimplify_seq_add_stmt (pre_p, stmt);
12934 *expr_p = NULL_TREE;
12937 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12938 target update constructs. */
12940 static void
12941 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
12943 tree expr = *expr_p;
12944 int kind;
12945 gomp_target *stmt;
12946 enum omp_region_type ort = ORT_WORKSHARE;
12948 switch (TREE_CODE (expr))
12950 case OACC_ENTER_DATA:
12951 case OACC_EXIT_DATA:
12952 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
12953 ort = ORT_ACC;
12954 break;
12955 case OACC_UPDATE:
12956 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
12957 ort = ORT_ACC;
12958 break;
12959 case OMP_TARGET_UPDATE:
12960 kind = GF_OMP_TARGET_KIND_UPDATE;
12961 break;
12962 case OMP_TARGET_ENTER_DATA:
12963 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
12964 break;
12965 case OMP_TARGET_EXIT_DATA:
12966 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
12967 break;
12968 default:
12969 gcc_unreachable ();
12971 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
12972 ort, TREE_CODE (expr));
12973 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
12974 TREE_CODE (expr));
12975 if (TREE_CODE (expr) == OACC_UPDATE
12976 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12977 OMP_CLAUSE_IF_PRESENT))
12979 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12980 clause. */
12981 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12982 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12983 switch (OMP_CLAUSE_MAP_KIND (c))
12985 case GOMP_MAP_FORCE_TO:
12986 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
12987 break;
12988 case GOMP_MAP_FORCE_FROM:
12989 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
12990 break;
12991 default:
12992 break;
12995 else if (TREE_CODE (expr) == OACC_EXIT_DATA
12996 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12997 OMP_CLAUSE_FINALIZE))
12999 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13000 semantics. */
13001 bool have_clause = false;
13002 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13003 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13004 switch (OMP_CLAUSE_MAP_KIND (c))
13006 case GOMP_MAP_FROM:
13007 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
13008 have_clause = true;
13009 break;
13010 case GOMP_MAP_RELEASE:
13011 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
13012 have_clause = true;
13013 break;
13014 case GOMP_MAP_POINTER:
13015 case GOMP_MAP_TO_PSET:
13016 /* TODO PR92929: we may see these here, but they'll always follow
13017 one of the clauses above, and will be handled by libgomp as
13018 one group, so no handling required here. */
13019 gcc_assert (have_clause);
13020 break;
13021 case GOMP_MAP_DETACH:
13022 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
13023 have_clause = false;
13024 break;
13025 case GOMP_MAP_STRUCT:
13026 have_clause = false;
13027 break;
13028 default:
13029 gcc_unreachable ();
13032 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
13034 gimplify_seq_add_stmt (pre_p, stmt);
13035 *expr_p = NULL_TREE;
13038 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13039 stabilized the lhs of the atomic operation as *ADDR. Return true if
13040 EXPR is this stabilized form. */
13042 static bool
13043 goa_lhs_expr_p (tree expr, tree addr)
13045 /* Also include casts to other type variants. The C front end is fond
13046 of adding these for e.g. volatile variables. This is like
13047 STRIP_TYPE_NOPS but includes the main variant lookup. */
13048 STRIP_USELESS_TYPE_CONVERSION (expr);
13050 if (TREE_CODE (expr) == INDIRECT_REF)
13052 expr = TREE_OPERAND (expr, 0);
13053 while (expr != addr
13054 && (CONVERT_EXPR_P (expr)
13055 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13056 && TREE_CODE (expr) == TREE_CODE (addr)
13057 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13059 expr = TREE_OPERAND (expr, 0);
13060 addr = TREE_OPERAND (addr, 0);
13062 if (expr == addr)
13063 return true;
13064 return (TREE_CODE (addr) == ADDR_EXPR
13065 && TREE_CODE (expr) == ADDR_EXPR
13066 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13068 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13069 return true;
13070 return false;
13073 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13074 expression does not involve the lhs, evaluate it into a temporary.
13075 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13076 or -1 if an error was encountered. */
13078 static int
13079 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13080 tree lhs_var)
13082 tree expr = *expr_p;
13083 int saw_lhs;
13085 if (goa_lhs_expr_p (expr, lhs_addr))
13087 *expr_p = lhs_var;
13088 return 1;
13090 if (is_gimple_val (expr))
13091 return 0;
13093 saw_lhs = 0;
13094 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13096 case tcc_binary:
13097 case tcc_comparison:
13098 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13099 lhs_var);
13100 /* FALLTHRU */
13101 case tcc_unary:
13102 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13103 lhs_var);
13104 break;
13105 case tcc_expression:
13106 switch (TREE_CODE (expr))
13108 case TRUTH_ANDIF_EXPR:
13109 case TRUTH_ORIF_EXPR:
13110 case TRUTH_AND_EXPR:
13111 case TRUTH_OR_EXPR:
13112 case TRUTH_XOR_EXPR:
13113 case BIT_INSERT_EXPR:
13114 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13115 lhs_addr, lhs_var);
13116 /* FALLTHRU */
13117 case TRUTH_NOT_EXPR:
13118 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13119 lhs_addr, lhs_var);
13120 break;
13121 case COMPOUND_EXPR:
13122 /* Break out any preevaluations from cp_build_modify_expr. */
13123 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13124 expr = TREE_OPERAND (expr, 1))
13125 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13126 *expr_p = expr;
13127 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13128 default:
13129 break;
13131 break;
13132 case tcc_reference:
13133 if (TREE_CODE (expr) == BIT_FIELD_REF)
13134 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13135 lhs_addr, lhs_var);
13136 break;
13137 default:
13138 break;
13141 if (saw_lhs == 0)
13143 enum gimplify_status gs;
13144 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13145 if (gs != GS_ALL_DONE)
13146 saw_lhs = -1;
13149 return saw_lhs;
13152 /* Gimplify an OMP_ATOMIC statement. */
13154 static enum gimplify_status
13155 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13157 tree addr = TREE_OPERAND (*expr_p, 0);
13158 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13159 ? NULL : TREE_OPERAND (*expr_p, 1);
13160 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13161 tree tmp_load;
13162 gomp_atomic_load *loadstmt;
13163 gomp_atomic_store *storestmt;
13165 tmp_load = create_tmp_reg (type);
13166 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13167 return GS_ERROR;
13169 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13170 != GS_ALL_DONE)
13171 return GS_ERROR;
13173 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13174 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13175 gimplify_seq_add_stmt (pre_p, loadstmt);
13176 if (rhs)
13178 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13179 representatives. Use BIT_FIELD_REF on the lhs instead. */
13180 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13181 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13183 tree bitpos = TREE_OPERAND (rhs, 2);
13184 tree op1 = TREE_OPERAND (rhs, 1);
13185 tree bitsize;
13186 tree tmp_store = tmp_load;
13187 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13188 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13189 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13190 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13191 else
13192 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13193 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13194 tree t = build2_loc (EXPR_LOCATION (rhs),
13195 MODIFY_EXPR, void_type_node,
13196 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13197 TREE_TYPE (op1), tmp_store, bitsize,
13198 bitpos), op1);
13199 gimplify_and_add (t, pre_p);
13200 rhs = tmp_store;
13202 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13203 != GS_ALL_DONE)
13204 return GS_ERROR;
13207 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13208 rhs = tmp_load;
13209 storestmt
13210 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13211 gimplify_seq_add_stmt (pre_p, storestmt);
13212 switch (TREE_CODE (*expr_p))
13214 case OMP_ATOMIC_READ:
13215 case OMP_ATOMIC_CAPTURE_OLD:
13216 *expr_p = tmp_load;
13217 gimple_omp_atomic_set_need_value (loadstmt);
13218 break;
13219 case OMP_ATOMIC_CAPTURE_NEW:
13220 *expr_p = rhs;
13221 gimple_omp_atomic_set_need_value (storestmt);
13222 break;
13223 default:
13224 *expr_p = NULL;
13225 break;
13228 return GS_ALL_DONE;
13231 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13232 body, and adding some EH bits. */
13234 static enum gimplify_status
13235 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13237 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13238 gimple *body_stmt;
13239 gtransaction *trans_stmt;
13240 gimple_seq body = NULL;
13241 int subcode = 0;
13243 /* Wrap the transaction body in a BIND_EXPR so we have a context
13244 where to put decls for OMP. */
13245 if (TREE_CODE (tbody) != BIND_EXPR)
13247 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13248 TREE_SIDE_EFFECTS (bind) = 1;
13249 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13250 TRANSACTION_EXPR_BODY (expr) = bind;
13253 push_gimplify_context ();
13254 temp = voidify_wrapper_expr (*expr_p, NULL);
13256 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13257 pop_gimplify_context (body_stmt);
13259 trans_stmt = gimple_build_transaction (body);
13260 if (TRANSACTION_EXPR_OUTER (expr))
13261 subcode = GTMA_IS_OUTER;
13262 else if (TRANSACTION_EXPR_RELAXED (expr))
13263 subcode = GTMA_IS_RELAXED;
13264 gimple_transaction_set_subcode (trans_stmt, subcode);
13266 gimplify_seq_add_stmt (pre_p, trans_stmt);
13268 if (temp)
13270 *expr_p = temp;
13271 return GS_OK;
13274 *expr_p = NULL_TREE;
13275 return GS_ALL_DONE;
13278 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13279 is the OMP_BODY of the original EXPR (which has already been
13280 gimplified so it's not present in the EXPR).
13282 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
13284 static gimple *
13285 gimplify_omp_ordered (tree expr, gimple_seq body)
13287 tree c, decls;
13288 int failures = 0;
13289 unsigned int i;
13290 tree source_c = NULL_TREE;
13291 tree sink_c = NULL_TREE;
13293 if (gimplify_omp_ctxp)
13295 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13296 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13297 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13298 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13299 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13301 error_at (OMP_CLAUSE_LOCATION (c),
13302 "%<ordered%> construct with %<depend%> clause must be "
13303 "closely nested inside a loop with %<ordered%> clause "
13304 "with a parameter");
13305 failures++;
13307 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13308 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13310 bool fail = false;
13311 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13312 decls && TREE_CODE (decls) == TREE_LIST;
13313 decls = TREE_CHAIN (decls), ++i)
13314 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13315 continue;
13316 else if (TREE_VALUE (decls)
13317 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13319 error_at (OMP_CLAUSE_LOCATION (c),
13320 "variable %qE is not an iteration "
13321 "of outermost loop %d, expected %qE",
13322 TREE_VALUE (decls), i + 1,
13323 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13324 fail = true;
13325 failures++;
13327 else
13328 TREE_VALUE (decls)
13329 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13330 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13332 error_at (OMP_CLAUSE_LOCATION (c),
13333 "number of variables in %<depend%> clause with "
13334 "%<sink%> modifier does not match number of "
13335 "iteration variables");
13336 failures++;
13338 sink_c = c;
13340 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13341 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13343 if (source_c)
13345 error_at (OMP_CLAUSE_LOCATION (c),
13346 "more than one %<depend%> clause with %<source%> "
13347 "modifier on an %<ordered%> construct");
13348 failures++;
13350 else
13351 source_c = c;
13354 if (source_c && sink_c)
13356 error_at (OMP_CLAUSE_LOCATION (source_c),
13357 "%<depend%> clause with %<source%> modifier specified "
13358 "together with %<depend%> clauses with %<sink%> modifier "
13359 "on the same construct");
13360 failures++;
13363 if (failures)
13364 return gimple_build_nop ();
13365 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13368 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13369 expression produces a value to be used as an operand inside a GIMPLE
13370 statement, the value will be stored back in *EXPR_P. This value will
13371 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13372 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13373 emitted in PRE_P and POST_P.
13375 Additionally, this process may overwrite parts of the input
13376 expression during gimplification. Ideally, it should be
13377 possible to do non-destructive gimplification.
13379 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13380 the expression needs to evaluate to a value to be used as
13381 an operand in a GIMPLE statement, this value will be stored in
13382 *EXPR_P on exit. This happens when the caller specifies one
13383 of fb_lvalue or fb_rvalue fallback flags.
13385 PRE_P will contain the sequence of GIMPLE statements corresponding
13386 to the evaluation of EXPR and all the side-effects that must
13387 be executed before the main expression. On exit, the last
13388 statement of PRE_P is the core statement being gimplified. For
13389 instance, when gimplifying 'if (++a)' the last statement in
13390 PRE_P will be 'if (t.1)' where t.1 is the result of
13391 pre-incrementing 'a'.
13393 POST_P will contain the sequence of GIMPLE statements corresponding
13394 to the evaluation of all the side-effects that must be executed
13395 after the main expression. If this is NULL, the post
13396 side-effects are stored at the end of PRE_P.
13398 The reason why the output is split in two is to handle post
13399 side-effects explicitly. In some cases, an expression may have
13400 inner and outer post side-effects which need to be emitted in
13401 an order different from the one given by the recursive
13402 traversal. For instance, for the expression (*p--)++ the post
13403 side-effects of '--' must actually occur *after* the post
13404 side-effects of '++'. However, gimplification will first visit
13405 the inner expression, so if a separate POST sequence was not
13406 used, the resulting sequence would be:
13408 1 t.1 = *p
13409 2 p = p - 1
13410 3 t.2 = t.1 + 1
13411 4 *p = t.2
13413 However, the post-decrement operation in line #2 must not be
13414 evaluated until after the store to *p at line #4, so the
13415 correct sequence should be:
13417 1 t.1 = *p
13418 2 t.2 = t.1 + 1
13419 3 *p = t.2
13420 4 p = p - 1
13422 So, by specifying a separate post queue, it is possible
13423 to emit the post side-effects in the correct order.
13424 If POST_P is NULL, an internal queue will be used. Before
13425 returning to the caller, the sequence POST_P is appended to
13426 the main output sequence PRE_P.
13428 GIMPLE_TEST_F points to a function that takes a tree T and
13429 returns nonzero if T is in the GIMPLE form requested by the
13430 caller. The GIMPLE predicates are in gimple.c.
13432 FALLBACK tells the function what sort of a temporary we want if
13433 gimplification cannot produce an expression that complies with
13434 GIMPLE_TEST_F.
13436 fb_none means that no temporary should be generated
13437 fb_rvalue means that an rvalue is OK to generate
13438 fb_lvalue means that an lvalue is OK to generate
13439 fb_either means that either is OK, but an lvalue is preferable.
13440 fb_mayfail means that gimplification may fail (in which case
13441 GS_ERROR will be returned)
13443 The return value is either GS_ERROR or GS_ALL_DONE, since this
13444 function iterates until EXPR is completely gimplified or an error
13445 occurs. */
13447 enum gimplify_status
13448 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13449 bool (*gimple_test_f) (tree), fallback_t fallback)
13451 tree tmp;
13452 gimple_seq internal_pre = NULL;
13453 gimple_seq internal_post = NULL;
13454 tree save_expr;
13455 bool is_statement;
13456 location_t saved_location;
13457 enum gimplify_status ret;
13458 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13459 tree label;
13461 save_expr = *expr_p;
13462 if (save_expr == NULL_TREE)
13463 return GS_ALL_DONE;
13465 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13466 is_statement = gimple_test_f == is_gimple_stmt;
13467 if (is_statement)
13468 gcc_assert (pre_p);
13470 /* Consistency checks. */
13471 if (gimple_test_f == is_gimple_reg)
13472 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13473 else if (gimple_test_f == is_gimple_val
13474 || gimple_test_f == is_gimple_call_addr
13475 || gimple_test_f == is_gimple_condexpr
13476 || gimple_test_f == is_gimple_condexpr_for_cond
13477 || gimple_test_f == is_gimple_mem_rhs
13478 || gimple_test_f == is_gimple_mem_rhs_or_call
13479 || gimple_test_f == is_gimple_reg_rhs
13480 || gimple_test_f == is_gimple_reg_rhs_or_call
13481 || gimple_test_f == is_gimple_asm_val
13482 || gimple_test_f == is_gimple_mem_ref_addr)
13483 gcc_assert (fallback & fb_rvalue);
13484 else if (gimple_test_f == is_gimple_min_lval
13485 || gimple_test_f == is_gimple_lvalue)
13486 gcc_assert (fallback & fb_lvalue);
13487 else if (gimple_test_f == is_gimple_addressable)
13488 gcc_assert (fallback & fb_either);
13489 else if (gimple_test_f == is_gimple_stmt)
13490 gcc_assert (fallback == fb_none);
13491 else
13493 /* We should have recognized the GIMPLE_TEST_F predicate to
13494 know what kind of fallback to use in case a temporary is
13495 needed to hold the value or address of *EXPR_P. */
13496 gcc_unreachable ();
13499 /* We used to check the predicate here and return immediately if it
13500 succeeds. This is wrong; the design is for gimplification to be
13501 idempotent, and for the predicates to only test for valid forms, not
13502 whether they are fully simplified. */
13503 if (pre_p == NULL)
13504 pre_p = &internal_pre;
13506 if (post_p == NULL)
13507 post_p = &internal_post;
13509 /* Remember the last statements added to PRE_P and POST_P. Every
13510 new statement added by the gimplification helpers needs to be
13511 annotated with location information. To centralize the
13512 responsibility, we remember the last statement that had been
13513 added to both queues before gimplifying *EXPR_P. If
13514 gimplification produces new statements in PRE_P and POST_P, those
13515 statements will be annotated with the same location information
13516 as *EXPR_P. */
13517 pre_last_gsi = gsi_last (*pre_p);
13518 post_last_gsi = gsi_last (*post_p);
13520 saved_location = input_location;
13521 if (save_expr != error_mark_node
13522 && EXPR_HAS_LOCATION (*expr_p))
13523 input_location = EXPR_LOCATION (*expr_p);
13525 /* Loop over the specific gimplifiers until the toplevel node
13526 remains the same. */
13529 /* Strip away as many useless type conversions as possible
13530 at the toplevel. */
13531 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13533 /* Remember the expr. */
13534 save_expr = *expr_p;
13536 /* Die, die, die, my darling. */
13537 if (error_operand_p (save_expr))
13539 ret = GS_ERROR;
13540 break;
13543 /* Do any language-specific gimplification. */
13544 ret = ((enum gimplify_status)
13545 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13546 if (ret == GS_OK)
13548 if (*expr_p == NULL_TREE)
13549 break;
13550 if (*expr_p != save_expr)
13551 continue;
13553 else if (ret != GS_UNHANDLED)
13554 break;
13556 /* Make sure that all the cases set 'ret' appropriately. */
13557 ret = GS_UNHANDLED;
13558 switch (TREE_CODE (*expr_p))
13560 /* First deal with the special cases. */
13562 case POSTINCREMENT_EXPR:
13563 case POSTDECREMENT_EXPR:
13564 case PREINCREMENT_EXPR:
13565 case PREDECREMENT_EXPR:
13566 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13567 fallback != fb_none,
13568 TREE_TYPE (*expr_p));
13569 break;
13571 case VIEW_CONVERT_EXPR:
13572 if ((fallback & fb_rvalue)
13573 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13574 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13576 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13577 post_p, is_gimple_val, fb_rvalue);
13578 recalculate_side_effects (*expr_p);
13579 break;
13581 /* Fallthru. */
13583 case ARRAY_REF:
13584 case ARRAY_RANGE_REF:
13585 case REALPART_EXPR:
13586 case IMAGPART_EXPR:
13587 case COMPONENT_REF:
13588 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13589 fallback ? fallback : fb_rvalue);
13590 break;
13592 case COND_EXPR:
13593 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13595 /* C99 code may assign to an array in a structure value of a
13596 conditional expression, and this has undefined behavior
13597 only on execution, so create a temporary if an lvalue is
13598 required. */
13599 if (fallback == fb_lvalue)
13601 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13602 mark_addressable (*expr_p);
13603 ret = GS_OK;
13605 break;
13607 case CALL_EXPR:
13608 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13610 /* C99 code may assign to an array in a structure returned
13611 from a function, and this has undefined behavior only on
13612 execution, so create a temporary if an lvalue is
13613 required. */
13614 if (fallback == fb_lvalue)
13616 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13617 mark_addressable (*expr_p);
13618 ret = GS_OK;
13620 break;
13622 case TREE_LIST:
13623 gcc_unreachable ();
13625 case COMPOUND_EXPR:
13626 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13627 break;
13629 case COMPOUND_LITERAL_EXPR:
13630 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13631 gimple_test_f, fallback);
13632 break;
13634 case MODIFY_EXPR:
13635 case INIT_EXPR:
13636 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13637 fallback != fb_none);
13638 break;
13640 case TRUTH_ANDIF_EXPR:
13641 case TRUTH_ORIF_EXPR:
13643 /* Preserve the original type of the expression and the
13644 source location of the outer expression. */
13645 tree org_type = TREE_TYPE (*expr_p);
13646 *expr_p = gimple_boolify (*expr_p);
13647 *expr_p = build3_loc (input_location, COND_EXPR,
13648 org_type, *expr_p,
13649 fold_convert_loc
13650 (input_location,
13651 org_type, boolean_true_node),
13652 fold_convert_loc
13653 (input_location,
13654 org_type, boolean_false_node));
13655 ret = GS_OK;
13656 break;
13659 case TRUTH_NOT_EXPR:
13661 tree type = TREE_TYPE (*expr_p);
13662 /* The parsers are careful to generate TRUTH_NOT_EXPR
13663 only with operands that are always zero or one.
13664 We do not fold here but handle the only interesting case
13665 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13666 *expr_p = gimple_boolify (*expr_p);
13667 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13668 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13669 TREE_TYPE (*expr_p),
13670 TREE_OPERAND (*expr_p, 0));
13671 else
13672 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13673 TREE_TYPE (*expr_p),
13674 TREE_OPERAND (*expr_p, 0),
13675 build_int_cst (TREE_TYPE (*expr_p), 1));
13676 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13677 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13678 ret = GS_OK;
13679 break;
13682 case ADDR_EXPR:
13683 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13684 break;
13686 case ANNOTATE_EXPR:
13688 tree cond = TREE_OPERAND (*expr_p, 0);
13689 tree kind = TREE_OPERAND (*expr_p, 1);
13690 tree data = TREE_OPERAND (*expr_p, 2);
13691 tree type = TREE_TYPE (cond);
13692 if (!INTEGRAL_TYPE_P (type))
13694 *expr_p = cond;
13695 ret = GS_OK;
13696 break;
13698 tree tmp = create_tmp_var (type);
13699 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13700 gcall *call
13701 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13702 gimple_call_set_lhs (call, tmp);
13703 gimplify_seq_add_stmt (pre_p, call);
13704 *expr_p = tmp;
13705 ret = GS_ALL_DONE;
13706 break;
13709 case VA_ARG_EXPR:
13710 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
13711 break;
13713 CASE_CONVERT:
13714 if (IS_EMPTY_STMT (*expr_p))
13716 ret = GS_ALL_DONE;
13717 break;
13720 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13721 || fallback == fb_none)
13723 /* Just strip a conversion to void (or in void context) and
13724 try again. */
13725 *expr_p = TREE_OPERAND (*expr_p, 0);
13726 ret = GS_OK;
13727 break;
13730 ret = gimplify_conversion (expr_p);
13731 if (ret == GS_ERROR)
13732 break;
13733 if (*expr_p != save_expr)
13734 break;
13735 /* FALLTHRU */
13737 case FIX_TRUNC_EXPR:
13738 /* unary_expr: ... | '(' cast ')' val | ... */
13739 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13740 is_gimple_val, fb_rvalue);
13741 recalculate_side_effects (*expr_p);
13742 break;
13744 case INDIRECT_REF:
13746 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
13747 bool notrap = TREE_THIS_NOTRAP (*expr_p);
13748 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13750 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13751 if (*expr_p != save_expr)
13753 ret = GS_OK;
13754 break;
13757 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13758 is_gimple_reg, fb_rvalue);
13759 if (ret == GS_ERROR)
13760 break;
13762 recalculate_side_effects (*expr_p);
13763 *expr_p = fold_build2_loc (input_location, MEM_REF,
13764 TREE_TYPE (*expr_p),
13765 TREE_OPERAND (*expr_p, 0),
13766 build_int_cst (saved_ptr_type, 0));
13767 TREE_THIS_VOLATILE (*expr_p) = volatilep;
13768 TREE_THIS_NOTRAP (*expr_p) = notrap;
13769 ret = GS_OK;
13770 break;
13773 /* We arrive here through the various re-gimplifcation paths. */
13774 case MEM_REF:
13775 /* First try re-folding the whole thing. */
13776 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13777 TREE_OPERAND (*expr_p, 0),
13778 TREE_OPERAND (*expr_p, 1));
13779 if (tmp)
13781 REF_REVERSE_STORAGE_ORDER (tmp)
13782 = REF_REVERSE_STORAGE_ORDER (*expr_p);
13783 *expr_p = tmp;
13784 recalculate_side_effects (*expr_p);
13785 ret = GS_OK;
13786 break;
13788 /* Avoid re-gimplifying the address operand if it is already
13789 in suitable form. Re-gimplifying would mark the address
13790 operand addressable. Always gimplify when not in SSA form
13791 as we still may have to gimplify decls with value-exprs. */
13792 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
13793 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13795 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13796 is_gimple_mem_ref_addr, fb_rvalue);
13797 if (ret == GS_ERROR)
13798 break;
13800 recalculate_side_effects (*expr_p);
13801 ret = GS_ALL_DONE;
13802 break;
13804 /* Constants need not be gimplified. */
13805 case INTEGER_CST:
13806 case REAL_CST:
13807 case FIXED_CST:
13808 case STRING_CST:
13809 case COMPLEX_CST:
13810 case VECTOR_CST:
13811 /* Drop the overflow flag on constants, we do not want
13812 that in the GIMPLE IL. */
13813 if (TREE_OVERFLOW_P (*expr_p))
13814 *expr_p = drop_tree_overflow (*expr_p);
13815 ret = GS_ALL_DONE;
13816 break;
13818 case CONST_DECL:
13819 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13820 CONST_DECL node. Otherwise the decl is replaceable by its
13821 value. */
13822 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13823 if (fallback & fb_lvalue)
13824 ret = GS_ALL_DONE;
13825 else
13827 *expr_p = DECL_INITIAL (*expr_p);
13828 ret = GS_OK;
13830 break;
13832 case DECL_EXPR:
13833 ret = gimplify_decl_expr (expr_p, pre_p);
13834 break;
13836 case BIND_EXPR:
13837 ret = gimplify_bind_expr (expr_p, pre_p);
13838 break;
13840 case LOOP_EXPR:
13841 ret = gimplify_loop_expr (expr_p, pre_p);
13842 break;
13844 case SWITCH_EXPR:
13845 ret = gimplify_switch_expr (expr_p, pre_p);
13846 break;
13848 case EXIT_EXPR:
13849 ret = gimplify_exit_expr (expr_p);
13850 break;
13852 case GOTO_EXPR:
13853 /* If the target is not LABEL, then it is a computed jump
13854 and the target needs to be gimplified. */
13855 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
13857 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13858 NULL, is_gimple_val, fb_rvalue);
13859 if (ret == GS_ERROR)
13860 break;
13862 gimplify_seq_add_stmt (pre_p,
13863 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
13864 ret = GS_ALL_DONE;
13865 break;
13867 case PREDICT_EXPR:
13868 gimplify_seq_add_stmt (pre_p,
13869 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13870 PREDICT_EXPR_OUTCOME (*expr_p)));
13871 ret = GS_ALL_DONE;
13872 break;
13874 case LABEL_EXPR:
13875 ret = gimplify_label_expr (expr_p, pre_p);
13876 label = LABEL_EXPR_LABEL (*expr_p);
13877 gcc_assert (decl_function_context (label) == current_function_decl);
13879 /* If the label is used in a goto statement, or address of the label
13880 is taken, we need to unpoison all variables that were seen so far.
13881 Doing so would prevent us from reporting a false positives. */
13882 if (asan_poisoned_variables
13883 && asan_used_labels != NULL
13884 && asan_used_labels->contains (label))
13885 asan_poison_variables (asan_poisoned_variables, false, pre_p);
13886 break;
13888 case CASE_LABEL_EXPR:
13889 ret = gimplify_case_label_expr (expr_p, pre_p);
13891 if (gimplify_ctxp->live_switch_vars)
13892 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13893 pre_p);
13894 break;
13896 case RETURN_EXPR:
13897 ret = gimplify_return_expr (*expr_p, pre_p);
13898 break;
13900 case CONSTRUCTOR:
13901 /* Don't reduce this in place; let gimplify_init_constructor work its
13902 magic. Buf if we're just elaborating this for side effects, just
13903 gimplify any element that has side-effects. */
13904 if (fallback == fb_none)
13906 unsigned HOST_WIDE_INT ix;
13907 tree val;
13908 tree temp = NULL_TREE;
13909 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13910 if (TREE_SIDE_EFFECTS (val))
13911 append_to_statement_list (val, &temp);
13913 *expr_p = temp;
13914 ret = temp ? GS_OK : GS_ALL_DONE;
13916 /* C99 code may assign to an array in a constructed
13917 structure or union, and this has undefined behavior only
13918 on execution, so create a temporary if an lvalue is
13919 required. */
13920 else if (fallback == fb_lvalue)
13922 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13923 mark_addressable (*expr_p);
13924 ret = GS_OK;
13926 else
13927 ret = GS_ALL_DONE;
13928 break;
13930 /* The following are special cases that are not handled by the
13931 original GIMPLE grammar. */
13933 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13934 eliminated. */
13935 case SAVE_EXPR:
13936 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13937 break;
13939 case BIT_FIELD_REF:
13940 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13941 post_p, is_gimple_lvalue, fb_either);
13942 recalculate_side_effects (*expr_p);
13943 break;
13945 case TARGET_MEM_REF:
13947 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13949 if (TMR_BASE (*expr_p))
13950 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13951 post_p, is_gimple_mem_ref_addr, fb_either);
13952 if (TMR_INDEX (*expr_p))
13953 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13954 post_p, is_gimple_val, fb_rvalue);
13955 if (TMR_INDEX2 (*expr_p))
13956 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13957 post_p, is_gimple_val, fb_rvalue);
13958 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13959 ret = MIN (r0, r1);
13961 break;
13963 case NON_LVALUE_EXPR:
13964 /* This should have been stripped above. */
13965 gcc_unreachable ();
13967 case ASM_EXPR:
13968 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13969 break;
13971 case TRY_FINALLY_EXPR:
13972 case TRY_CATCH_EXPR:
13974 gimple_seq eval, cleanup;
13975 gtry *try_;
13977 /* Calls to destructors are generated automatically in FINALLY/CATCH
13978 block. They should have location as UNKNOWN_LOCATION. However,
13979 gimplify_call_expr will reset these call stmts to input_location
13980 if it finds stmt's location is unknown. To prevent resetting for
13981 destructors, we set the input_location to unknown.
13982 Note that this only affects the destructor calls in FINALLY/CATCH
13983 block, and will automatically reset to its original value by the
13984 end of gimplify_expr. */
13985 input_location = UNKNOWN_LOCATION;
13986 eval = cleanup = NULL;
13987 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13988 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13989 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13991 gimple_seq n = NULL, e = NULL;
13992 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13993 0), &n);
13994 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13995 1), &e);
13996 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
13998 geh_else *stmt = gimple_build_eh_else (n, e);
13999 gimple_seq_add_stmt (&cleanup, stmt);
14002 else
14003 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
14004 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14005 if (gimple_seq_empty_p (cleanup))
14007 gimple_seq_add_seq (pre_p, eval);
14008 ret = GS_ALL_DONE;
14009 break;
14011 try_ = gimple_build_try (eval, cleanup,
14012 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14013 ? GIMPLE_TRY_FINALLY
14014 : GIMPLE_TRY_CATCH);
14015 if (EXPR_HAS_LOCATION (save_expr))
14016 gimple_set_location (try_, EXPR_LOCATION (save_expr));
14017 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
14018 gimple_set_location (try_, saved_location);
14019 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
14020 gimple_try_set_catch_is_cleanup (try_,
14021 TRY_CATCH_IS_CLEANUP (*expr_p));
14022 gimplify_seq_add_stmt (pre_p, try_);
14023 ret = GS_ALL_DONE;
14024 break;
14027 case CLEANUP_POINT_EXPR:
14028 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
14029 break;
14031 case TARGET_EXPR:
14032 ret = gimplify_target_expr (expr_p, pre_p, post_p);
14033 break;
14035 case CATCH_EXPR:
14037 gimple *c;
14038 gimple_seq handler = NULL;
14039 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
14040 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
14041 gimplify_seq_add_stmt (pre_p, c);
14042 ret = GS_ALL_DONE;
14043 break;
14046 case EH_FILTER_EXPR:
14048 gimple *ehf;
14049 gimple_seq failure = NULL;
14051 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14052 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
14053 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
14054 gimplify_seq_add_stmt (pre_p, ehf);
14055 ret = GS_ALL_DONE;
14056 break;
14059 case OBJ_TYPE_REF:
14061 enum gimplify_status r0, r1;
14062 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14063 post_p, is_gimple_val, fb_rvalue);
14064 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14065 post_p, is_gimple_val, fb_rvalue);
14066 TREE_SIDE_EFFECTS (*expr_p) = 0;
14067 ret = MIN (r0, r1);
14069 break;
14071 case LABEL_DECL:
14072 /* We get here when taking the address of a label. We mark
14073 the label as "forced"; meaning it can never be removed and
14074 it is a potential target for any computed goto. */
14075 FORCED_LABEL (*expr_p) = 1;
14076 ret = GS_ALL_DONE;
14077 break;
14079 case STATEMENT_LIST:
14080 ret = gimplify_statement_list (expr_p, pre_p);
14081 break;
14083 case WITH_SIZE_EXPR:
14085 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14086 post_p == &internal_post ? NULL : post_p,
14087 gimple_test_f, fallback);
14088 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14089 is_gimple_val, fb_rvalue);
14090 ret = GS_ALL_DONE;
14092 break;
14094 case VAR_DECL:
14095 case PARM_DECL:
14096 ret = gimplify_var_or_parm_decl (expr_p);
14097 break;
14099 case RESULT_DECL:
14100 /* When within an OMP context, notice uses of variables. */
14101 if (gimplify_omp_ctxp)
14102 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14103 ret = GS_ALL_DONE;
14104 break;
14106 case DEBUG_EXPR_DECL:
14107 gcc_unreachable ();
14109 case DEBUG_BEGIN_STMT:
14110 gimplify_seq_add_stmt (pre_p,
14111 gimple_build_debug_begin_stmt
14112 (TREE_BLOCK (*expr_p),
14113 EXPR_LOCATION (*expr_p)));
14114 ret = GS_ALL_DONE;
14115 *expr_p = NULL;
14116 break;
14118 case SSA_NAME:
14119 /* Allow callbacks into the gimplifier during optimization. */
14120 ret = GS_ALL_DONE;
14121 break;
14123 case OMP_PARALLEL:
14124 gimplify_omp_parallel (expr_p, pre_p);
14125 ret = GS_ALL_DONE;
14126 break;
14128 case OMP_TASK:
14129 gimplify_omp_task (expr_p, pre_p);
14130 ret = GS_ALL_DONE;
14131 break;
14133 case OMP_FOR:
14134 case OMP_SIMD:
14135 case OMP_DISTRIBUTE:
14136 case OMP_TASKLOOP:
14137 case OACC_LOOP:
14138 ret = gimplify_omp_for (expr_p, pre_p);
14139 break;
14141 case OMP_LOOP:
14142 ret = gimplify_omp_loop (expr_p, pre_p);
14143 break;
14145 case OACC_CACHE:
14146 gimplify_oacc_cache (expr_p, pre_p);
14147 ret = GS_ALL_DONE;
14148 break;
14150 case OACC_DECLARE:
14151 gimplify_oacc_declare (expr_p, pre_p);
14152 ret = GS_ALL_DONE;
14153 break;
14155 case OACC_HOST_DATA:
14156 case OACC_DATA:
14157 case OACC_KERNELS:
14158 case OACC_PARALLEL:
14159 case OACC_SERIAL:
14160 case OMP_SECTIONS:
14161 case OMP_SINGLE:
14162 case OMP_TARGET:
14163 case OMP_TARGET_DATA:
14164 case OMP_TEAMS:
14165 gimplify_omp_workshare (expr_p, pre_p);
14166 ret = GS_ALL_DONE;
14167 break;
14169 case OACC_ENTER_DATA:
14170 case OACC_EXIT_DATA:
14171 case OACC_UPDATE:
14172 case OMP_TARGET_UPDATE:
14173 case OMP_TARGET_ENTER_DATA:
14174 case OMP_TARGET_EXIT_DATA:
14175 gimplify_omp_target_update (expr_p, pre_p);
14176 ret = GS_ALL_DONE;
14177 break;
14179 case OMP_SECTION:
14180 case OMP_MASTER:
14181 case OMP_ORDERED:
14182 case OMP_CRITICAL:
14183 case OMP_SCAN:
14185 gimple_seq body = NULL;
14186 gimple *g;
14187 bool saved_in_omp_construct = in_omp_construct;
14189 in_omp_construct = true;
14190 gimplify_and_add (OMP_BODY (*expr_p), &body);
14191 in_omp_construct = saved_in_omp_construct;
14192 switch (TREE_CODE (*expr_p))
14194 case OMP_SECTION:
14195 g = gimple_build_omp_section (body);
14196 break;
14197 case OMP_MASTER:
14198 g = gimple_build_omp_master (body);
14199 break;
14200 case OMP_ORDERED:
14201 g = gimplify_omp_ordered (*expr_p, body);
14202 break;
14203 case OMP_CRITICAL:
14204 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14205 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14206 gimplify_adjust_omp_clauses (pre_p, body,
14207 &OMP_CRITICAL_CLAUSES (*expr_p),
14208 OMP_CRITICAL);
14209 g = gimple_build_omp_critical (body,
14210 OMP_CRITICAL_NAME (*expr_p),
14211 OMP_CRITICAL_CLAUSES (*expr_p));
14212 break;
14213 case OMP_SCAN:
14214 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14215 pre_p, ORT_WORKSHARE, OMP_SCAN);
14216 gimplify_adjust_omp_clauses (pre_p, body,
14217 &OMP_SCAN_CLAUSES (*expr_p),
14218 OMP_SCAN);
14219 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14220 break;
14221 default:
14222 gcc_unreachable ();
14224 gimplify_seq_add_stmt (pre_p, g);
14225 ret = GS_ALL_DONE;
14226 break;
14229 case OMP_TASKGROUP:
14231 gimple_seq body = NULL;
14233 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14234 bool saved_in_omp_construct = in_omp_construct;
14235 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14236 OMP_TASKGROUP);
14237 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14239 in_omp_construct = true;
14240 gimplify_and_add (OMP_BODY (*expr_p), &body);
14241 in_omp_construct = saved_in_omp_construct;
14242 gimple_seq cleanup = NULL;
14243 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14244 gimple *g = gimple_build_call (fn, 0);
14245 gimple_seq_add_stmt (&cleanup, g);
14246 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14247 body = NULL;
14248 gimple_seq_add_stmt (&body, g);
14249 g = gimple_build_omp_taskgroup (body, *pclauses);
14250 gimplify_seq_add_stmt (pre_p, g);
14251 ret = GS_ALL_DONE;
14252 break;
14255 case OMP_ATOMIC:
14256 case OMP_ATOMIC_READ:
14257 case OMP_ATOMIC_CAPTURE_OLD:
14258 case OMP_ATOMIC_CAPTURE_NEW:
14259 ret = gimplify_omp_atomic (expr_p, pre_p);
14260 break;
14262 case TRANSACTION_EXPR:
14263 ret = gimplify_transaction (expr_p, pre_p);
14264 break;
14266 case TRUTH_AND_EXPR:
14267 case TRUTH_OR_EXPR:
14268 case TRUTH_XOR_EXPR:
14270 tree orig_type = TREE_TYPE (*expr_p);
14271 tree new_type, xop0, xop1;
14272 *expr_p = gimple_boolify (*expr_p);
14273 new_type = TREE_TYPE (*expr_p);
14274 if (!useless_type_conversion_p (orig_type, new_type))
14276 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14277 ret = GS_OK;
14278 break;
14281 /* Boolified binary truth expressions are semantically equivalent
14282 to bitwise binary expressions. Canonicalize them to the
14283 bitwise variant. */
14284 switch (TREE_CODE (*expr_p))
14286 case TRUTH_AND_EXPR:
14287 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14288 break;
14289 case TRUTH_OR_EXPR:
14290 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14291 break;
14292 case TRUTH_XOR_EXPR:
14293 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14294 break;
14295 default:
14296 break;
14298 /* Now make sure that operands have compatible type to
14299 expression's new_type. */
14300 xop0 = TREE_OPERAND (*expr_p, 0);
14301 xop1 = TREE_OPERAND (*expr_p, 1);
14302 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14303 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14304 new_type,
14305 xop0);
14306 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14307 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14308 new_type,
14309 xop1);
14310 /* Continue classified as tcc_binary. */
14311 goto expr_2;
14314 case VEC_COND_EXPR:
14315 goto expr_3;
14317 case VEC_PERM_EXPR:
14318 /* Classified as tcc_expression. */
14319 goto expr_3;
14321 case BIT_INSERT_EXPR:
14322 /* Argument 3 is a constant. */
14323 goto expr_2;
14325 case POINTER_PLUS_EXPR:
14327 enum gimplify_status r0, r1;
14328 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14329 post_p, is_gimple_val, fb_rvalue);
14330 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14331 post_p, is_gimple_val, fb_rvalue);
14332 recalculate_side_effects (*expr_p);
14333 ret = MIN (r0, r1);
14334 break;
14337 default:
14338 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14340 case tcc_comparison:
14341 /* Handle comparison of objects of non scalar mode aggregates
14342 with a call to memcmp. It would be nice to only have to do
14343 this for variable-sized objects, but then we'd have to allow
14344 the same nest of reference nodes we allow for MODIFY_EXPR and
14345 that's too complex.
14347 Compare scalar mode aggregates as scalar mode values. Using
14348 memcmp for them would be very inefficient at best, and is
14349 plain wrong if bitfields are involved. */
14351 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14353 /* Vector comparisons need no boolification. */
14354 if (TREE_CODE (type) == VECTOR_TYPE)
14355 goto expr_2;
14356 else if (!AGGREGATE_TYPE_P (type))
14358 tree org_type = TREE_TYPE (*expr_p);
14359 *expr_p = gimple_boolify (*expr_p);
14360 if (!useless_type_conversion_p (org_type,
14361 TREE_TYPE (*expr_p)))
14363 *expr_p = fold_convert_loc (input_location,
14364 org_type, *expr_p);
14365 ret = GS_OK;
14367 else
14368 goto expr_2;
14370 else if (TYPE_MODE (type) != BLKmode)
14371 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14372 else
14373 ret = gimplify_variable_sized_compare (expr_p);
14375 break;
14378 /* If *EXPR_P does not need to be special-cased, handle it
14379 according to its class. */
14380 case tcc_unary:
14381 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14382 post_p, is_gimple_val, fb_rvalue);
14383 break;
14385 case tcc_binary:
14386 expr_2:
14388 enum gimplify_status r0, r1;
14390 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14391 post_p, is_gimple_val, fb_rvalue);
14392 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14393 post_p, is_gimple_val, fb_rvalue);
14395 ret = MIN (r0, r1);
14396 break;
14399 expr_3:
14401 enum gimplify_status r0, r1, r2;
14403 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14404 post_p, is_gimple_val, fb_rvalue);
14405 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14406 post_p, is_gimple_val, fb_rvalue);
14407 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14408 post_p, is_gimple_val, fb_rvalue);
14410 ret = MIN (MIN (r0, r1), r2);
14411 break;
14414 case tcc_declaration:
14415 case tcc_constant:
14416 ret = GS_ALL_DONE;
14417 goto dont_recalculate;
14419 default:
14420 gcc_unreachable ();
14423 recalculate_side_effects (*expr_p);
14425 dont_recalculate:
14426 break;
14429 gcc_assert (*expr_p || ret != GS_OK);
14431 while (ret == GS_OK);
14433 /* If we encountered an error_mark somewhere nested inside, either
14434 stub out the statement or propagate the error back out. */
14435 if (ret == GS_ERROR)
14437 if (is_statement)
14438 *expr_p = NULL;
14439 goto out;
14442 /* This was only valid as a return value from the langhook, which
14443 we handled. Make sure it doesn't escape from any other context. */
14444 gcc_assert (ret != GS_UNHANDLED);
14446 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14448 /* We aren't looking for a value, and we don't have a valid
14449 statement. If it doesn't have side-effects, throw it away.
14450 We can also get here with code such as "*&&L;", where L is
14451 a LABEL_DECL that is marked as FORCED_LABEL. */
14452 if (TREE_CODE (*expr_p) == LABEL_DECL
14453 || !TREE_SIDE_EFFECTS (*expr_p))
14454 *expr_p = NULL;
14455 else if (!TREE_THIS_VOLATILE (*expr_p))
14457 /* This is probably a _REF that contains something nested that
14458 has side effects. Recurse through the operands to find it. */
14459 enum tree_code code = TREE_CODE (*expr_p);
14461 switch (code)
14463 case COMPONENT_REF:
14464 case REALPART_EXPR:
14465 case IMAGPART_EXPR:
14466 case VIEW_CONVERT_EXPR:
14467 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14468 gimple_test_f, fallback);
14469 break;
14471 case ARRAY_REF:
14472 case ARRAY_RANGE_REF:
14473 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14474 gimple_test_f, fallback);
14475 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14476 gimple_test_f, fallback);
14477 break;
14479 default:
14480 /* Anything else with side-effects must be converted to
14481 a valid statement before we get here. */
14482 gcc_unreachable ();
14485 *expr_p = NULL;
14487 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14488 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14490 /* Historically, the compiler has treated a bare reference
14491 to a non-BLKmode volatile lvalue as forcing a load. */
14492 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14494 /* Normally, we do not want to create a temporary for a
14495 TREE_ADDRESSABLE type because such a type should not be
14496 copied by bitwise-assignment. However, we make an
14497 exception here, as all we are doing here is ensuring that
14498 we read the bytes that make up the type. We use
14499 create_tmp_var_raw because create_tmp_var will abort when
14500 given a TREE_ADDRESSABLE type. */
14501 tree tmp = create_tmp_var_raw (type, "vol");
14502 gimple_add_tmp_var (tmp);
14503 gimplify_assign (tmp, *expr_p, pre_p);
14504 *expr_p = NULL;
14506 else
14507 /* We can't do anything useful with a volatile reference to
14508 an incomplete type, so just throw it away. Likewise for
14509 a BLKmode type, since any implicit inner load should
14510 already have been turned into an explicit one by the
14511 gimplification process. */
14512 *expr_p = NULL;
14515 /* If we are gimplifying at the statement level, we're done. Tack
14516 everything together and return. */
14517 if (fallback == fb_none || is_statement)
14519 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14520 it out for GC to reclaim it. */
14521 *expr_p = NULL_TREE;
14523 if (!gimple_seq_empty_p (internal_pre)
14524 || !gimple_seq_empty_p (internal_post))
14526 gimplify_seq_add_seq (&internal_pre, internal_post);
14527 gimplify_seq_add_seq (pre_p, internal_pre);
14530 /* The result of gimplifying *EXPR_P is going to be the last few
14531 statements in *PRE_P and *POST_P. Add location information
14532 to all the statements that were added by the gimplification
14533 helpers. */
14534 if (!gimple_seq_empty_p (*pre_p))
14535 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14537 if (!gimple_seq_empty_p (*post_p))
14538 annotate_all_with_location_after (*post_p, post_last_gsi,
14539 input_location);
14541 goto out;
14544 #ifdef ENABLE_GIMPLE_CHECKING
14545 if (*expr_p)
14547 enum tree_code code = TREE_CODE (*expr_p);
14548 /* These expressions should already be in gimple IR form. */
14549 gcc_assert (code != MODIFY_EXPR
14550 && code != ASM_EXPR
14551 && code != BIND_EXPR
14552 && code != CATCH_EXPR
14553 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14554 && code != EH_FILTER_EXPR
14555 && code != GOTO_EXPR
14556 && code != LABEL_EXPR
14557 && code != LOOP_EXPR
14558 && code != SWITCH_EXPR
14559 && code != TRY_FINALLY_EXPR
14560 && code != EH_ELSE_EXPR
14561 && code != OACC_PARALLEL
14562 && code != OACC_KERNELS
14563 && code != OACC_SERIAL
14564 && code != OACC_DATA
14565 && code != OACC_HOST_DATA
14566 && code != OACC_DECLARE
14567 && code != OACC_UPDATE
14568 && code != OACC_ENTER_DATA
14569 && code != OACC_EXIT_DATA
14570 && code != OACC_CACHE
14571 && code != OMP_CRITICAL
14572 && code != OMP_FOR
14573 && code != OACC_LOOP
14574 && code != OMP_MASTER
14575 && code != OMP_TASKGROUP
14576 && code != OMP_ORDERED
14577 && code != OMP_PARALLEL
14578 && code != OMP_SCAN
14579 && code != OMP_SECTIONS
14580 && code != OMP_SECTION
14581 && code != OMP_SINGLE);
14583 #endif
14585 /* Otherwise we're gimplifying a subexpression, so the resulting
14586 value is interesting. If it's a valid operand that matches
14587 GIMPLE_TEST_F, we're done. Unless we are handling some
14588 post-effects internally; if that's the case, we need to copy into
14589 a temporary before adding the post-effects to POST_P. */
14590 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14591 goto out;
14593 /* Otherwise, we need to create a new temporary for the gimplified
14594 expression. */
14596 /* We can't return an lvalue if we have an internal postqueue. The
14597 object the lvalue refers to would (probably) be modified by the
14598 postqueue; we need to copy the value out first, which means an
14599 rvalue. */
14600 if ((fallback & fb_lvalue)
14601 && gimple_seq_empty_p (internal_post)
14602 && is_gimple_addressable (*expr_p))
14604 /* An lvalue will do. Take the address of the expression, store it
14605 in a temporary, and replace the expression with an INDIRECT_REF of
14606 that temporary. */
14607 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14608 unsigned int ref_align = get_object_alignment (*expr_p);
14609 tree ref_type = TREE_TYPE (*expr_p);
14610 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14611 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14612 if (TYPE_ALIGN (ref_type) != ref_align)
14613 ref_type = build_aligned_type (ref_type, ref_align);
14614 *expr_p = build2 (MEM_REF, ref_type,
14615 tmp, build_zero_cst (ref_alias_type));
14617 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14619 /* An rvalue will do. Assign the gimplified expression into a
14620 new temporary TMP and replace the original expression with
14621 TMP. First, make sure that the expression has a type so that
14622 it can be assigned into a temporary. */
14623 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14624 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14626 else
14628 #ifdef ENABLE_GIMPLE_CHECKING
14629 if (!(fallback & fb_mayfail))
14631 fprintf (stderr, "gimplification failed:\n");
14632 print_generic_expr (stderr, *expr_p);
14633 debug_tree (*expr_p);
14634 internal_error ("gimplification failed");
14636 #endif
14637 gcc_assert (fallback & fb_mayfail);
14639 /* If this is an asm statement, and the user asked for the
14640 impossible, don't die. Fail and let gimplify_asm_expr
14641 issue an error. */
14642 ret = GS_ERROR;
14643 goto out;
14646 /* Make sure the temporary matches our predicate. */
14647 gcc_assert ((*gimple_test_f) (*expr_p));
14649 if (!gimple_seq_empty_p (internal_post))
14651 annotate_all_with_location (internal_post, input_location);
14652 gimplify_seq_add_seq (pre_p, internal_post);
14655 out:
14656 input_location = saved_location;
14657 return ret;
14660 /* Like gimplify_expr but make sure the gimplified result is not itself
14661 a SSA name (but a decl if it were). Temporaries required by
14662 evaluating *EXPR_P may be still SSA names. */
14664 static enum gimplify_status
14665 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14666 bool (*gimple_test_f) (tree), fallback_t fallback,
14667 bool allow_ssa)
14669 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14670 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14671 gimple_test_f, fallback);
14672 if (! allow_ssa
14673 && TREE_CODE (*expr_p) == SSA_NAME)
14675 tree name = *expr_p;
14676 if (was_ssa_name_p)
14677 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14678 else
14680 /* Avoid the extra copy if possible. */
14681 *expr_p = create_tmp_reg (TREE_TYPE (name));
14682 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
14683 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14684 release_ssa_name (name);
14687 return ret;
14690 /* Look through TYPE for variable-sized objects and gimplify each such
14691 size that we find. Add to LIST_P any statements generated. */
14693 void
14694 gimplify_type_sizes (tree type, gimple_seq *list_p)
14696 tree field, t;
14698 if (type == NULL || type == error_mark_node)
14699 return;
14701 /* We first do the main variant, then copy into any other variants. */
14702 type = TYPE_MAIN_VARIANT (type);
14704 /* Avoid infinite recursion. */
14705 if (TYPE_SIZES_GIMPLIFIED (type))
14706 return;
14708 TYPE_SIZES_GIMPLIFIED (type) = 1;
14710 switch (TREE_CODE (type))
14712 case INTEGER_TYPE:
14713 case ENUMERAL_TYPE:
14714 case BOOLEAN_TYPE:
14715 case REAL_TYPE:
14716 case FIXED_POINT_TYPE:
14717 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
14718 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
14720 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14722 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
14723 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
14725 break;
14727 case ARRAY_TYPE:
14728 /* These types may not have declarations, so handle them here. */
14729 gimplify_type_sizes (TREE_TYPE (type), list_p);
14730 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
14731 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
14732 with assigned stack slots, for -O1+ -g they should be tracked
14733 by VTA. */
14734 if (!(TYPE_NAME (type)
14735 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14736 && DECL_IGNORED_P (TYPE_NAME (type)))
14737 && TYPE_DOMAIN (type)
14738 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
14740 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
14741 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14742 DECL_IGNORED_P (t) = 0;
14743 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14744 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14745 DECL_IGNORED_P (t) = 0;
14747 break;
14749 case RECORD_TYPE:
14750 case UNION_TYPE:
14751 case QUAL_UNION_TYPE:
14752 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14753 if (TREE_CODE (field) == FIELD_DECL)
14755 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
14756 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
14757 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
14758 gimplify_type_sizes (TREE_TYPE (field), list_p);
14760 break;
14762 case POINTER_TYPE:
14763 case REFERENCE_TYPE:
14764 /* We used to recurse on the pointed-to type here, which turned out to
14765 be incorrect because its definition might refer to variables not
14766 yet initialized at this point if a forward declaration is involved.
14768 It was actually useful for anonymous pointed-to types to ensure
14769 that the sizes evaluation dominates every possible later use of the
14770 values. Restricting to such types here would be safe since there
14771 is no possible forward declaration around, but would introduce an
14772 undesirable middle-end semantic to anonymity. We then defer to
14773 front-ends the responsibility of ensuring that the sizes are
14774 evaluated both early and late enough, e.g. by attaching artificial
14775 type declarations to the tree. */
14776 break;
14778 default:
14779 break;
14782 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
14783 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
14785 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14787 TYPE_SIZE (t) = TYPE_SIZE (type);
14788 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
14789 TYPE_SIZES_GIMPLIFIED (t) = 1;
14793 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14794 a size or position, has had all of its SAVE_EXPRs evaluated.
14795 We add any required statements to *STMT_P. */
14797 void
14798 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
14800 tree expr = *expr_p;
14802 /* We don't do anything if the value isn't there, is constant, or contains
14803 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
14804 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
14805 will want to replace it with a new variable, but that will cause problems
14806 if this type is from outside the function. It's OK to have that here. */
14807 if (expr == NULL_TREE
14808 || is_gimple_constant (expr)
14809 || TREE_CODE (expr) == VAR_DECL
14810 || CONTAINS_PLACEHOLDER_P (expr))
14811 return;
14813 *expr_p = unshare_expr (expr);
14815 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14816 if the def vanishes. */
14817 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
14819 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
14820 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
14821 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
14822 if (is_gimple_constant (*expr_p))
14823 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
14826 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
14827 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
14828 is true, also gimplify the parameters. */
14830 gbind *
14831 gimplify_body (tree fndecl, bool do_parms)
14833 location_t saved_location = input_location;
14834 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
14835 gimple *outer_stmt;
14836 gbind *outer_bind;
14838 timevar_push (TV_TREE_GIMPLIFY);
14840 init_tree_ssa (cfun);
14842 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
14843 gimplification. */
14844 default_rtl_profile ();
14846 gcc_assert (gimplify_ctxp == NULL);
14847 push_gimplify_context (true);
14849 if (flag_openacc || flag_openmp)
14851 gcc_assert (gimplify_omp_ctxp == NULL);
14852 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
14853 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
14856 /* Unshare most shared trees in the body and in that of any nested functions.
14857 It would seem we don't have to do this for nested functions because
14858 they are supposed to be output and then the outer function gimplified
14859 first, but the g++ front end doesn't always do it that way. */
14860 unshare_body (fndecl);
14861 unvisit_body (fndecl);
14863 /* Make sure input_location isn't set to something weird. */
14864 input_location = DECL_SOURCE_LOCATION (fndecl);
14866 /* Resolve callee-copies. This has to be done before processing
14867 the body so that DECL_VALUE_EXPR gets processed correctly. */
14868 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
14870 /* Gimplify the function's body. */
14871 seq = NULL;
14872 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
14873 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
14874 if (!outer_stmt)
14876 outer_stmt = gimple_build_nop ();
14877 gimplify_seq_add_stmt (&seq, outer_stmt);
14880 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
14881 not the case, wrap everything in a GIMPLE_BIND to make it so. */
14882 if (gimple_code (outer_stmt) == GIMPLE_BIND
14883 && (gimple_seq_first_nondebug_stmt (seq)
14884 == gimple_seq_last_nondebug_stmt (seq)))
14886 outer_bind = as_a <gbind *> (outer_stmt);
14887 if (gimple_seq_first_stmt (seq) != outer_stmt
14888 || gimple_seq_last_stmt (seq) != outer_stmt)
14890 /* If there are debug stmts before or after outer_stmt, move them
14891 inside of outer_bind body. */
14892 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
14893 gimple_seq second_seq = NULL;
14894 if (gimple_seq_first_stmt (seq) != outer_stmt
14895 && gimple_seq_last_stmt (seq) != outer_stmt)
14897 second_seq = gsi_split_seq_after (gsi);
14898 gsi_remove (&gsi, false);
14900 else if (gimple_seq_first_stmt (seq) != outer_stmt)
14901 gsi_remove (&gsi, false);
14902 else
14904 gsi_remove (&gsi, false);
14905 second_seq = seq;
14906 seq = NULL;
14908 gimple_seq_add_seq_without_update (&seq,
14909 gimple_bind_body (outer_bind));
14910 gimple_seq_add_seq_without_update (&seq, second_seq);
14911 gimple_bind_set_body (outer_bind, seq);
14914 else
14915 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
14917 DECL_SAVED_TREE (fndecl) = NULL_TREE;
14919 /* If we had callee-copies statements, insert them at the beginning
14920 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
14921 if (!gimple_seq_empty_p (parm_stmts))
14923 tree parm;
14925 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
14926 if (parm_cleanup)
14928 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
14929 GIMPLE_TRY_FINALLY);
14930 parm_stmts = NULL;
14931 gimple_seq_add_stmt (&parm_stmts, g);
14933 gimple_bind_set_body (outer_bind, parm_stmts);
14935 for (parm = DECL_ARGUMENTS (current_function_decl);
14936 parm; parm = DECL_CHAIN (parm))
14937 if (DECL_HAS_VALUE_EXPR_P (parm))
14939 DECL_HAS_VALUE_EXPR_P (parm) = 0;
14940 DECL_IGNORED_P (parm) = 0;
14944 if ((flag_openacc || flag_openmp || flag_openmp_simd)
14945 && gimplify_omp_ctxp)
14947 delete_omp_context (gimplify_omp_ctxp);
14948 gimplify_omp_ctxp = NULL;
14951 pop_gimplify_context (outer_bind);
14952 gcc_assert (gimplify_ctxp == NULL);
14954 if (flag_checking && !seen_error ())
14955 verify_gimple_in_seq (gimple_bind_body (outer_bind));
14957 timevar_pop (TV_TREE_GIMPLIFY);
14958 input_location = saved_location;
14960 return outer_bind;
14963 typedef char *char_p; /* For DEF_VEC_P. */
14965 /* Return whether we should exclude FNDECL from instrumentation. */
14967 static bool
14968 flag_instrument_functions_exclude_p (tree fndecl)
14970 vec<char_p> *v;
14972 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14973 if (v && v->length () > 0)
14975 const char *name;
14976 int i;
14977 char *s;
14979 name = lang_hooks.decl_printable_name (fndecl, 1);
14980 FOR_EACH_VEC_ELT (*v, i, s)
14981 if (strstr (name, s) != NULL)
14982 return true;
14985 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14986 if (v && v->length () > 0)
14988 const char *name;
14989 int i;
14990 char *s;
14992 name = DECL_SOURCE_FILE (fndecl);
14993 FOR_EACH_VEC_ELT (*v, i, s)
14994 if (strstr (name, s) != NULL)
14995 return true;
14998 return false;
15001 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15002 node for the function we want to gimplify.
15004 Return the sequence of GIMPLE statements corresponding to the body
15005 of FNDECL. */
15007 void
15008 gimplify_function_tree (tree fndecl)
15010 gimple_seq seq;
15011 gbind *bind;
15013 gcc_assert (!gimple_body (fndecl));
15015 if (DECL_STRUCT_FUNCTION (fndecl))
15016 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
15017 else
15018 push_struct_function (fndecl);
15020 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15021 if necessary. */
15022 cfun->curr_properties |= PROP_gimple_lva;
15024 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
15025 asan_poisoned_variables = new hash_set<tree> ();
15026 bind = gimplify_body (fndecl, true);
15027 if (asan_poisoned_variables)
15029 delete asan_poisoned_variables;
15030 asan_poisoned_variables = NULL;
15033 /* The tree body of the function is no longer needed, replace it
15034 with the new GIMPLE body. */
15035 seq = NULL;
15036 gimple_seq_add_stmt (&seq, bind);
15037 gimple_set_body (fndecl, seq);
15039 /* If we're instrumenting function entry/exit, then prepend the call to
15040 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15041 catch the exit hook. */
15042 /* ??? Add some way to ignore exceptions for this TFE. */
15043 if (flag_instrument_function_entry_exit
15044 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
15045 /* Do not instrument extern inline functions. */
15046 && !(DECL_DECLARED_INLINE_P (fndecl)
15047 && DECL_EXTERNAL (fndecl)
15048 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
15049 && !flag_instrument_functions_exclude_p (fndecl))
15051 tree x;
15052 gbind *new_bind;
15053 gimple *tf;
15054 gimple_seq cleanup = NULL, body = NULL;
15055 tree tmp_var, this_fn_addr;
15056 gcall *call;
15058 /* The instrumentation hooks aren't going to call the instrumented
15059 function and the address they receive is expected to be matchable
15060 against symbol addresses. Make sure we don't create a trampoline,
15061 in case the current function is nested. */
15062 this_fn_addr = build_fold_addr_expr (current_function_decl);
15063 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15065 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15066 call = gimple_build_call (x, 1, integer_zero_node);
15067 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15068 gimple_call_set_lhs (call, tmp_var);
15069 gimplify_seq_add_stmt (&cleanup, call);
15070 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15071 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15072 gimplify_seq_add_stmt (&cleanup, call);
15073 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15075 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15076 call = gimple_build_call (x, 1, integer_zero_node);
15077 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15078 gimple_call_set_lhs (call, tmp_var);
15079 gimplify_seq_add_stmt (&body, call);
15080 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15081 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15082 gimplify_seq_add_stmt (&body, call);
15083 gimplify_seq_add_stmt (&body, tf);
15084 new_bind = gimple_build_bind (NULL, body, NULL);
15086 /* Replace the current function body with the body
15087 wrapped in the try/finally TF. */
15088 seq = NULL;
15089 gimple_seq_add_stmt (&seq, new_bind);
15090 gimple_set_body (fndecl, seq);
15091 bind = new_bind;
15094 if (sanitize_flags_p (SANITIZE_THREAD)
15095 && param_tsan_instrument_func_entry_exit)
15097 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15098 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15099 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15100 /* Replace the current function body with the body
15101 wrapped in the try/finally TF. */
15102 seq = NULL;
15103 gimple_seq_add_stmt (&seq, new_bind);
15104 gimple_set_body (fndecl, seq);
15107 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15108 cfun->curr_properties |= PROP_gimple_any;
15110 pop_cfun ();
15112 dump_function (TDI_gimple, fndecl);
15115 /* Return a dummy expression of type TYPE in order to keep going after an
15116 error. */
15118 static tree
15119 dummy_object (tree type)
15121 tree t = build_int_cst (build_pointer_type (type), 0);
15122 return build2 (MEM_REF, type, t, t);
15125 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15126 builtin function, but a very special sort of operator. */
15128 enum gimplify_status
15129 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15130 gimple_seq *post_p ATTRIBUTE_UNUSED)
15132 tree promoted_type, have_va_type;
15133 tree valist = TREE_OPERAND (*expr_p, 0);
15134 tree type = TREE_TYPE (*expr_p);
15135 tree t, tag, aptag;
15136 location_t loc = EXPR_LOCATION (*expr_p);
15138 /* Verify that valist is of the proper type. */
15139 have_va_type = TREE_TYPE (valist);
15140 if (have_va_type == error_mark_node)
15141 return GS_ERROR;
15142 have_va_type = targetm.canonical_va_list_type (have_va_type);
15143 if (have_va_type == NULL_TREE
15144 && POINTER_TYPE_P (TREE_TYPE (valist)))
15145 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15146 have_va_type
15147 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15148 gcc_assert (have_va_type != NULL_TREE);
15150 /* Generate a diagnostic for requesting data of a type that cannot
15151 be passed through `...' due to type promotion at the call site. */
15152 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15153 != type)
15155 static bool gave_help;
15156 bool warned;
15157 /* Use the expansion point to handle cases such as passing bool (defined
15158 in a system header) through `...'. */
15159 location_t xloc
15160 = expansion_point_location_if_in_system_header (loc);
15162 /* Unfortunately, this is merely undefined, rather than a constraint
15163 violation, so we cannot make this an error. If this call is never
15164 executed, the program is still strictly conforming. */
15165 auto_diagnostic_group d;
15166 warned = warning_at (xloc, 0,
15167 "%qT is promoted to %qT when passed through %<...%>",
15168 type, promoted_type);
15169 if (!gave_help && warned)
15171 gave_help = true;
15172 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15173 promoted_type, type);
15176 /* We can, however, treat "undefined" any way we please.
15177 Call abort to encourage the user to fix the program. */
15178 if (warned)
15179 inform (xloc, "if this code is reached, the program will abort");
15180 /* Before the abort, allow the evaluation of the va_list
15181 expression to exit or longjmp. */
15182 gimplify_and_add (valist, pre_p);
15183 t = build_call_expr_loc (loc,
15184 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15185 gimplify_and_add (t, pre_p);
15187 /* This is dead code, but go ahead and finish so that the
15188 mode of the result comes out right. */
15189 *expr_p = dummy_object (type);
15190 return GS_ALL_DONE;
15193 tag = build_int_cst (build_pointer_type (type), 0);
15194 aptag = build_int_cst (TREE_TYPE (valist), 0);
15196 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15197 valist, tag, aptag);
15199 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15200 needs to be expanded. */
15201 cfun->curr_properties &= ~PROP_gimple_lva;
15203 return GS_OK;
15206 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15208 DST/SRC are the destination and source respectively. You can pass
15209 ungimplified trees in DST or SRC, in which case they will be
15210 converted to a gimple operand if necessary.
15212 This function returns the newly created GIMPLE_ASSIGN tuple. */
15214 gimple *
15215 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15217 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15218 gimplify_and_add (t, seq_p);
15219 ggc_free (t);
15220 return gimple_seq_last_stmt (*seq_p);
15223 inline hashval_t
15224 gimplify_hasher::hash (const elt_t *p)
15226 tree t = p->val;
15227 return iterative_hash_expr (t, 0);
15230 inline bool
15231 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15233 tree t1 = p1->val;
15234 tree t2 = p2->val;
15235 enum tree_code code = TREE_CODE (t1);
15237 if (TREE_CODE (t2) != code
15238 || TREE_TYPE (t1) != TREE_TYPE (t2))
15239 return false;
15241 if (!operand_equal_p (t1, t2, 0))
15242 return false;
15244 /* Only allow them to compare equal if they also hash equal; otherwise
15245 results are nondeterminate, and we fail bootstrap comparison. */
15246 gcc_checking_assert (hash (p1) == hash (p2));
15248 return true;