re PR fortran/89639 (FAIL: gfortran.dg/ieee/ieee_9.f90 -O0 (test for excess errors))
[official-gcc.git] / gcc / gimplify.c
blob6d7a314719dec98f7da83f895c77cbb6358437e6
/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2019 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
69 /* Hash set of poisoned variables in a bind expr. */
70 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Per-decl data-sharing flags recorded in the splay tree of an OMP
   gimplification context.  The low bits are mutually exclusive
   data-sharing classes; the higher bits are modifier flags.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 1048576,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 2097152,

  GOVD_NONTEMPORAL = 4194304,

  /* Mask of the mutually exclusive data-sharing classes.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of the OMP/OpenACC region a gimplify_omp_ctx describes.  Base
   kinds are distinct bits; low bits 1/2 encode variants (combined,
   untied, host, ...) of the base kind.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
160 /* Gimplify hashtable helper. */
162 struct gimplify_hasher : free_ptr_hash <elt_t>
164 static inline hashval_t hash (const elt_t *);
165 static inline bool equal (const elt_t *, const elt_t *);
168 struct gimplify_ctx
170 struct gimplify_ctx *prev_context;
172 vec<gbind *> bind_expr_stack;
173 tree temps;
174 gimple_seq conditional_cleanups;
175 tree exit_label;
176 tree return_temp;
178 vec<tree> case_labels;
179 hash_set<tree> *live_switch_vars;
180 /* The formal temporary table. Should this be persistent? */
181 hash_table<gimplify_hasher> *temp_htab;
183 int conditions;
184 unsigned into_ssa : 1;
185 unsigned allow_rhs_cond_expr : 1;
186 unsigned in_cleanup_point_expr : 1;
187 unsigned keep_stack : 1;
188 unsigned save_stack : 1;
189 unsigned in_switch_expr : 1;
/* Category indices into gimplify_omp_ctx::defaultmap[].  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
200 struct gimplify_omp_ctx
202 struct gimplify_omp_ctx *outer_context;
203 splay_tree variables;
204 hash_set<tree> *privatized_types;
205 /* Iteration variables in an OMP_FOR. */
206 vec<tree> loop_iter_var;
207 location_t location;
208 enum omp_clause_default_kind default_kind;
209 enum omp_region_type region_type;
210 bool combined_loop;
211 bool distribute;
212 bool target_firstprivatize_array_bases;
213 int defaultmap[4];
216 static struct gimplify_ctx *gimplify_ctxp;
217 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
219 /* Forward declaration. */
220 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
221 static hash_map<tree, tree> *oacc_declare_returns;
222 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
223 bool (*) (tree), fallback_t, bool);
225 /* Shorter alias name for the above function for use in gimplify.c
226 only. */
228 static inline void
229 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
231 gimple_seq_add_stmt_without_update (seq_p, gs);
234 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
235 NULL, a new sequence is allocated. This function is
236 similar to gimple_seq_add_seq, but does not scan the operands.
237 During gimplification, we need to manipulate statement sequences
238 before the def/use vectors have been constructed. */
240 static void
241 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
243 gimple_stmt_iterator si;
245 if (src == NULL)
246 return;
248 si = gsi_last (*dst_p);
249 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
253 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
254 and popping gimplify contexts. */
256 static struct gimplify_ctx *ctx_pool = NULL;
258 /* Return a gimplify context struct from the pool. */
260 static inline struct gimplify_ctx *
261 ctx_alloc (void)
263 struct gimplify_ctx * c = ctx_pool;
265 if (c)
266 ctx_pool = c->prev_context;
267 else
268 c = XNEW (struct gimplify_ctx);
270 memset (c, '\0', sizeof (*c));
271 return c;
274 /* Put gimplify context C back into the pool. */
276 static inline void
277 ctx_free (struct gimplify_ctx *c)
279 c->prev_context = ctx_pool;
280 ctx_pool = c;
283 /* Free allocated ctx stack memory. */
285 void
286 free_gimplify_stack (void)
288 struct gimplify_ctx *c;
290 while ((c = ctx_pool))
292 ctx_pool = c->prev_context;
293 free (c);
298 /* Set up a context for the gimplifier. */
300 void
301 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
303 struct gimplify_ctx *c = ctx_alloc ();
305 c->prev_context = gimplify_ctxp;
306 gimplify_ctxp = c;
307 gimplify_ctxp->into_ssa = in_ssa;
308 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
311 /* Tear down a context for the gimplifier. If BODY is non-null, then
312 put the temporaries into the outer BIND_EXPR. Otherwise, put them
313 in the local_decls.
315 BODY is not a sequence, but the first tuple in a sequence. */
317 void
318 pop_gimplify_context (gimple *body)
320 struct gimplify_ctx *c = gimplify_ctxp;
322 gcc_assert (c
323 && (!c->bind_expr_stack.exists ()
324 || c->bind_expr_stack.is_empty ()));
325 c->bind_expr_stack.release ();
326 gimplify_ctxp = c->prev_context;
328 if (body)
329 declare_vars (c->temps, body, false);
330 else
331 record_vars (c->temps);
333 delete c->temp_htab;
334 c->temp_htab = NULL;
335 ctx_free (c);
338 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
340 static void
341 gimple_push_bind_expr (gbind *bind_stmt)
343 gimplify_ctxp->bind_expr_stack.reserve (8);
344 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
347 /* Pop the first element off the stack of bindings. */
349 static void
350 gimple_pop_bind_expr (void)
352 gimplify_ctxp->bind_expr_stack.pop ();
355 /* Return the first element of the stack of bindings. */
357 gbind *
358 gimple_current_bind_expr (void)
360 return gimplify_ctxp->bind_expr_stack.last ();
363 /* Return the stack of bindings created during gimplification. */
365 vec<gbind *>
366 gimple_bind_expr_stack (void)
368 return gimplify_ctxp->bind_expr_stack;
371 /* Return true iff there is a COND_EXPR between us and the innermost
372 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
374 static bool
375 gimple_conditional_context (void)
377 return gimplify_ctxp->conditions > 0;
380 /* Note that we've entered a COND_EXPR. */
382 static void
383 gimple_push_condition (void)
385 #ifdef ENABLE_GIMPLE_CHECKING
386 if (gimplify_ctxp->conditions == 0)
387 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
388 #endif
389 ++(gimplify_ctxp->conditions);
392 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
393 now, add any conditional cleanups we've seen to the prequeue. */
395 static void
396 gimple_pop_condition (gimple_seq *pre_p)
398 int conds = --(gimplify_ctxp->conditions);
400 gcc_assert (conds >= 0);
401 if (conds == 0)
403 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
404 gimplify_ctxp->conditional_cleanups = NULL;
408 /* A stable comparison routine for use with splay trees and DECLs. */
410 static int
411 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
413 tree a = (tree) xa;
414 tree b = (tree) xb;
416 return DECL_UID (a) - DECL_UID (b);
419 /* Create a new omp construct that deals with variable remapping. */
421 static struct gimplify_omp_ctx *
422 new_omp_context (enum omp_region_type region_type)
424 struct gimplify_omp_ctx *c;
426 c = XCNEW (struct gimplify_omp_ctx);
427 c->outer_context = gimplify_omp_ctxp;
428 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
429 c->privatized_types = new hash_set<tree>;
430 c->location = input_location;
431 c->region_type = region_type;
432 if ((region_type & ORT_TASK) == 0)
433 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
434 else
435 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
436 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
437 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
438 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
439 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
441 return c;
444 /* Destroy an omp construct that deals with variable remapping. */
446 static void
447 delete_omp_context (struct gimplify_omp_ctx *c)
449 splay_tree_delete (c->variables);
450 delete c->privatized_types;
451 c->loop_iter_var.release ();
452 XDELETE (c);
455 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
456 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
458 /* Both gimplify the statement T and append it to *SEQ_P. This function
459 behaves exactly as gimplify_stmt, but you don't have to pass T as a
460 reference. */
462 void
463 gimplify_and_add (tree t, gimple_seq *seq_p)
465 gimplify_stmt (&t, seq_p);
468 /* Gimplify statement T into sequence *SEQ_P, and return the first
469 tuple in the sequence of generated tuples for this statement.
470 Return NULL if gimplifying T produced no tuples. */
472 static gimple *
473 gimplify_and_return_first (tree t, gimple_seq *seq_p)
475 gimple_stmt_iterator last = gsi_last (*seq_p);
477 gimplify_and_add (t, seq_p);
479 if (!gsi_end_p (last))
481 gsi_next (&last);
482 return gsi_stmt (last);
484 else
485 return gimple_seq_first_stmt (*seq_p);
488 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
489 LHS, or for a call argument. */
491 static bool
492 is_gimple_mem_rhs (tree t)
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return is_gimple_val (t) || is_gimple_lvalue (t);
502 /* Return true if T is a CALL_EXPR or an expression that can be
503 assigned to a temporary. Note that this predicate should only be
504 used during gimplification. See the rationale for this in
505 gimplify_modify_expr. */
507 static bool
508 is_gimple_reg_rhs_or_call (tree t)
510 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
511 || TREE_CODE (t) == CALL_EXPR);
514 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
515 this predicate should only be used during gimplification. See the
516 rationale for this in gimplify_modify_expr. */
518 static bool
519 is_gimple_mem_rhs_or_call (tree t)
521 /* If we're dealing with a renamable type, either source or dest must be
522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t)))
524 return is_gimple_val (t);
525 else
526 return (is_gimple_val (t)
527 || is_gimple_lvalue (t)
528 || TREE_CLOBBER_P (t)
529 || TREE_CODE (t) == CALL_EXPR);
532 /* Create a temporary with a name derived from VAL. Subroutine of
533 lookup_tmp_var; nobody else should call this function. */
535 static inline tree
536 create_tmp_from_val (tree val)
538 /* Drop all qualifiers and address-space information from the value type. */
539 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
540 tree var = create_tmp_var (type, get_name (val));
541 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
542 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
543 DECL_GIMPLE_REG_P (var) = 1;
544 return var;
547 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
548 an existing expression temporary. */
550 static tree
551 lookup_tmp_var (tree val, bool is_formal)
553 tree ret;
555 /* If not optimizing, never really reuse a temporary. local-alloc
556 won't allocate any variable that is used in more than one basic
557 block, which means it will go into memory, causing much extra
558 work in reload and final and poorer code generation, outweighing
559 the extra memory allocation here. */
560 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
561 ret = create_tmp_from_val (val);
562 else
564 elt_t elt, *elt_p;
565 elt_t **slot;
567 elt.val = val;
568 if (!gimplify_ctxp->temp_htab)
569 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
570 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
571 if (*slot == NULL)
573 elt_p = XNEW (elt_t);
574 elt_p->val = val;
575 elt_p->temp = ret = create_tmp_from_val (val);
576 *slot = elt_p;
578 else
580 elt_p = *slot;
581 ret = elt_p->temp;
585 return ret;
588 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
590 static tree
591 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
592 bool is_formal, bool allow_ssa)
594 tree t, mod;
596 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
597 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
598 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
599 fb_rvalue);
601 if (allow_ssa
602 && gimplify_ctxp->into_ssa
603 && is_gimple_reg_type (TREE_TYPE (val)))
605 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
606 if (! gimple_in_ssa_p (cfun))
608 const char *name = get_name (val);
609 if (name)
610 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
613 else
614 t = lookup_tmp_var (val, is_formal);
616 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
618 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622 ggc_free (mod);
624 return t;
627 /* Return a formal temporary variable initialized with VAL. PRE_P is as
628 in gimplify_expr. Only use this function if:
630 1) The value of the unfactored expression represented by VAL will not
631 change between the initialization and use of the temporary, and
632 2) The temporary will not be otherwise modified.
634 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
635 and #2 means it is inappropriate for && temps.
637 For other cases, use get_initialized_tmp_var instead. */
639 tree
640 get_formal_tmp_var (tree val, gimple_seq *pre_p)
642 return internal_get_tmp_var (val, pre_p, NULL, true, true);
645 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
646 are as in gimplify_expr. */
648 tree
649 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
650 bool allow_ssa)
652 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
655 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
656 generate debug info for them; otherwise don't. */
658 void
659 declare_vars (tree vars, gimple *gs, bool debug_info)
661 tree last = vars;
662 if (last)
664 tree temps, block;
666 gbind *scope = as_a <gbind *> (gs);
668 temps = nreverse (last);
670 block = gimple_bind_block (scope);
671 gcc_assert (!block || TREE_CODE (block) == BLOCK);
672 if (!block || !debug_info)
674 DECL_CHAIN (last) = gimple_bind_vars (scope);
675 gimple_bind_set_vars (scope, temps);
677 else
679 /* We need to attach the nodes both to the BIND_EXPR and to its
680 associated BLOCK for debugging purposes. The key point here
681 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
682 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
683 if (BLOCK_VARS (block))
684 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
685 else
687 gimple_bind_set_vars (scope,
688 chainon (gimple_bind_vars (scope), temps));
689 BLOCK_VARS (block) = temps;
695 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
696 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
697 no such upper bound can be obtained. */
699 static void
700 force_constant_size (tree var)
702 /* The only attempt we make is by querying the maximum size of objects
703 of the variable's type. */
705 HOST_WIDE_INT max_size;
707 gcc_assert (VAR_P (var));
709 max_size = max_int_size_in_bytes (TREE_TYPE (var));
711 gcc_assert (max_size >= 0);
713 DECL_SIZE_UNIT (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
715 DECL_SIZE (var)
716 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
719 /* Push the temporary variable TMP into the current binding. */
721 void
722 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
724 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
726 /* Later processing assumes that the object size is constant, which might
727 not be true at this point. Force the use of a constant upper bound in
728 this case. */
729 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
730 force_constant_size (tmp);
732 DECL_CONTEXT (tmp) = fn->decl;
733 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
735 record_vars_into (tmp, fn->decl);
738 /* Push the temporary variable TMP into the current binding. */
740 void
741 gimple_add_tmp_var (tree tmp)
743 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
749 force_constant_size (tmp);
751 DECL_CONTEXT (tmp) = current_function_decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
754 if (gimplify_ctxp)
756 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
757 gimplify_ctxp->temps = tmp;
759 /* Mark temporaries local within the nearest enclosing parallel. */
760 if (gimplify_omp_ctxp)
762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
763 while (ctx
764 && (ctx->region_type == ORT_WORKSHARE
765 || ctx->region_type == ORT_TASKGROUP
766 || ctx->region_type == ORT_SIMD
767 || ctx->region_type == ORT_ACC))
768 ctx = ctx->outer_context;
769 if (ctx)
770 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
773 else if (cfun)
774 record_vars (tmp);
775 else
777 gimple_seq body_seq;
779 /* This case is for nested functions. We need to expose the locals
780 they create. */
781 body_seq = gimple_body (current_function_decl);
782 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

     1. Manual unsharing.  The front-end needs to call unshare_expr on every
	expression that might end up being shared across functions.

     2. Deep unsharing.  This is an extension of regular unsharing.  Instead
	of calling unshare_expr on expressions that might be shared across
	functions, the front-end pre-marks them with TREE_VISITED.  This will
	ensure that they are unshared on the first reference within functions
	when the regular unsharing algorithm runs.  The counterpart is that
	this algorithm must look deeper than for manual unsharing, which is
	specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
824 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
825 These nodes model computations that must be done once. If we were to
826 unshare something like SAVE_EXPR(i++), the gimplification process would
827 create wrong code. However, if DATA is non-null, it must hold a pointer
828 set that is used to unshare the subtrees of these nodes. */
830 static tree
831 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
833 tree t = *tp;
834 enum tree_code code = TREE_CODE (t);
836 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
837 copy their subtrees if we can make sure to do it only once. */
838 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
840 if (data && !((hash_set<tree> *)data)->add (t))
842 else
843 *walk_subtrees = 0;
846 /* Stop at types, decls, constants like copy_tree_r. */
847 else if (TREE_CODE_CLASS (code) == tcc_type
848 || TREE_CODE_CLASS (code) == tcc_declaration
849 || TREE_CODE_CLASS (code) == tcc_constant)
850 *walk_subtrees = 0;
852 /* Cope with the statement expression extension. */
853 else if (code == STATEMENT_LIST)
856 /* Leave the bulk of the work to copy_tree_r itself. */
857 else
858 copy_tree_r (tp, walk_subtrees, NULL);
860 return NULL_TREE;
863 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
864 If *TP has been visited already, then *TP is deeply copied by calling
865 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
867 static tree
868 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
870 tree t = *tp;
871 enum tree_code code = TREE_CODE (t);
873 /* Skip types, decls, and constants. But we do want to look at their
874 types and the bounds of types. Mark them as visited so we properly
875 unmark their subtrees on the unmark pass. If we've already seen them,
876 don't look down further. */
877 if (TREE_CODE_CLASS (code) == tcc_type
878 || TREE_CODE_CLASS (code) == tcc_declaration
879 || TREE_CODE_CLASS (code) == tcc_constant)
881 if (TREE_VISITED (t))
882 *walk_subtrees = 0;
883 else
884 TREE_VISITED (t) = 1;
887 /* If this node has been visited already, unshare it and don't look
888 any deeper. */
889 else if (TREE_VISITED (t))
891 walk_tree (tp, mostly_copy_tree_r, data, NULL);
892 *walk_subtrees = 0;
895 /* Otherwise, mark the node as visited and keep looking. */
896 else
897 TREE_VISITED (t) = 1;
899 return NULL_TREE;
902 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
903 copy_if_shared_r callback unmodified. */
905 static inline void
906 copy_if_shared (tree *tp, void *data)
908 walk_tree (tp, copy_if_shared_r, data, NULL);
911 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
912 any nested functions. */
914 static void
915 unshare_body (tree fndecl)
917 struct cgraph_node *cgn = cgraph_node::get (fndecl);
918 /* If the language requires deep unsharing, we need a pointer set to make
919 sure we don't repeatedly unshare subtrees of unshareable nodes. */
920 hash_set<tree> *visited
921 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
923 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
924 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
925 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
927 delete visited;
929 if (cgn)
930 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
931 unshare_body (cgn->decl);
934 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
935 Subtrees are walked until the first unvisited node is encountered. */
937 static tree
938 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
940 tree t = *tp;
942 /* If this node has been visited, unmark it and keep looking. */
943 if (TREE_VISITED (t))
944 TREE_VISITED (t) = 0;
946 /* Otherwise, don't look any deeper. */
947 else
948 *walk_subtrees = 0;
950 return NULL_TREE;
953 /* Unmark the visited trees rooted at *TP. */
955 static inline void
956 unmark_visited (tree *tp)
958 walk_tree (tp, unmark_visited_r, NULL, NULL);
961 /* Likewise, but mark all trees as not visited. */
963 static void
964 unvisit_body (tree fndecl)
966 struct cgraph_node *cgn = cgraph_node::get (fndecl);
968 unmark_visited (&DECL_SAVED_TREE (fndecl));
969 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
970 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
972 if (cgn)
973 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
974 unvisit_body (cgn->decl);
977 /* Unconditionally make an unshared copy of EXPR. This is used when using
978 stored expressions which span multiple functions, such as BINFO_VTABLE,
979 as the normal unsharing process can't tell that they're shared. */
981 tree
982 unshare_expr (tree expr)
984 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
985 return expr;
988 /* Worker for unshare_expr_without_location. */
990 static tree
991 prune_expr_location (tree *tp, int *walk_subtrees, void *)
993 if (EXPR_P (*tp))
994 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
995 else
996 *walk_subtrees = 0;
997 return NULL_TREE;
1000 /* Similar to unshare_expr but also prune all expression locations
1001 from EXPR. */
1003 tree
1004 unshare_expr_without_location (tree expr)
1006 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1007 if (EXPR_P (expr))
1008 walk_tree (&expr, prune_expr_location, NULL, NULL);
1009 return expr;
1012 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1013 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1014 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1015 EXPR is the location of the EXPR. */
1017 static location_t
1018 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1020 if (!expr)
1021 return or_else;
1023 if (EXPR_HAS_LOCATION (expr))
1024 return EXPR_LOCATION (expr);
1026 if (TREE_CODE (expr) != STATEMENT_LIST)
1027 return or_else;
1029 tree_stmt_iterator i = tsi_start (expr);
1031 bool found = false;
1032 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1034 found = true;
1035 tsi_next (&i);
1038 if (!found || !tsi_one_before_end_p (i))
1039 return or_else;
1041 return rexpr_location (tsi_stmt (i), or_else);
1044 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1045 rexpr_location for the potential recursion. */
1047 static inline bool
1048 rexpr_has_location (tree expr)
1050 return rexpr_location (expr) != UNKNOWN_LOCATION;
1054 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1055 contain statements and have a value. Assign its value to a temporary
1056 and give it void_type_node. Return the temporary, or NULL_TREE if
1057 WRAPPER was already void. */
/* TEMP, if nonnull, must be an INIT_EXPR or MODIFY_EXPR; its RHS slot
   receives the wrapper's innermost value expression.  If TEMP is NULL a
   fresh "retval" temporary is created to hold the value instead.  */
1059 tree
1060 voidify_wrapper_expr (tree wrapper, tree temp)
1062 tree type = TREE_TYPE (wrapper);
1063 if (type && !VOID_TYPE_P (type))
1065 tree *p;
1067 /* Set p to point to the body of the wrapper. Loop until we find
1068 something that isn't a wrapper. */
1069 for (p = &wrapper; p && *p; )
1071 switch (TREE_CODE (*p))
1073 case BIND_EXPR:
1074 TREE_SIDE_EFFECTS (*p) = 1;
1075 TREE_TYPE (*p) = void_type_node;
1076 /* For a BIND_EXPR, the body is operand 1. */
1077 p = &BIND_EXPR_BODY (*p);
1078 break;
1080 case CLEANUP_POINT_EXPR:
1081 case TRY_FINALLY_EXPR:
1082 case TRY_CATCH_EXPR:
1083 TREE_SIDE_EFFECTS (*p) = 1;
1084 TREE_TYPE (*p) = void_type_node;
1085 p = &TREE_OPERAND (*p, 0);
1086 break;
1088 case STATEMENT_LIST:
1090 tree_stmt_iterator i = tsi_last (*p);
1091 TREE_SIDE_EFFECTS (*p) = 1;
1092 TREE_TYPE (*p) = void_type_node;
/* Continue with the last statement of the list; an empty list ends
   the descent.  */
1093 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1095 break;
1097 case COMPOUND_EXPR:
1098 /* Advance to the last statement. Set all container types to
1099 void. */
1100 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1102 TREE_SIDE_EFFECTS (*p) = 1;
1103 TREE_TYPE (*p) = void_type_node;
1105 break;
1107 case TRANSACTION_EXPR:
1108 TREE_SIDE_EFFECTS (*p) = 1;
1109 TREE_TYPE (*p) = void_type_node;
1110 p = &TRANSACTION_EXPR_BODY (*p);
1111 break;
1113 default:
1114 /* Assume that any tree upon which voidify_wrapper_expr is
1115 directly called is a wrapper, and that its body is op0. */
1116 if (p == &wrapper)
1118 TREE_SIDE_EFFECTS (*p) = 1;
1119 TREE_TYPE (*p) = void_type_node;
1120 p = &TREE_OPERAND (*p, 0);
1121 break;
/* Reached a non-wrapper node: *p is the value expression.  */
1123 goto out;
1127 out:
/* If the descent bottomed out in nothing (or an empty statement),
   there is no value to capture.  */
1128 if (p == NULL || IS_EMPTY_STMT (*p))
1129 temp = NULL_TREE;
1130 else if (temp)
1132 /* The wrapper is on the RHS of an assignment that we're pushing
1133 down. */
1134 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1135 || TREE_CODE (temp) == MODIFY_EXPR);
1136 TREE_OPERAND (temp, 1) = *p;
1137 *p = temp;
1139 else
/* No assignment to push down: invent a temporary to hold the value.  */
1141 temp = create_tmp_var (type, "retval");
1142 *p = build2 (INIT_EXPR, type, temp, *p);
1145 return temp;
1148 return NULL_TREE;
1151 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1152 a temporary through which they communicate. */
1154 static void
1155 build_stack_save_restore (gcall **save, gcall **restore)
1157 tree tmp_var;
1159 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1160 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1161 gimple_call_set_lhs (*save, tmp_var);
1163 *restore
1164 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1165 1, tmp_var);
1168 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1170 static tree
1171 build_asan_poison_call_expr (tree decl)
1173 /* Do not poison variables that have size equal to zero. */
1174 tree unit_size = DECL_SIZE_UNIT (decl);
1175 if (zerop (unit_size))
1176 return NULL_TREE;
1178 tree base = build_fold_addr_expr (decl);
1180 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1181 void_type_node, 3,
1182 build_int_cst (integer_type_node,
1183 ASAN_MARK_POISON),
1184 base, unit_size);
1187 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1188 on POISON flag, shadow memory of a DECL variable. The call will be
1189 put on location identified by IT iterator, where BEFORE flag drives
1190 position where the stmt will be put. */
1192 static void
1193 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1194 bool before)
1196 tree unit_size = DECL_SIZE_UNIT (decl);
1197 tree base = build_fold_addr_expr (decl);
1199 /* Do not poison variables that have size equal to zero. */
1200 if (zerop (unit_size))
1201 return;
1203 /* It's necessary to have all stack variables aligned to ASAN granularity
1204 bytes. */
1205 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1206 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1208 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
/* Emit ASAN_MARK (flags, &decl, size) at the requested position.  */
1210 gimple *g
1211 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1212 build_int_cst (integer_type_node, flags),
1213 base, unit_size);
1215 if (before)
1216 gsi_insert_before (it, g, GSI_NEW_STMT);
1217 else
1218 gsi_insert_after (it, g, GSI_NEW_STMT);
1221 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1222 either poisons or unpoisons a DECL. Created statement is appended
1223 to SEQ_P gimple sequence. */
1225 static void
1226 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1228 gimple_stmt_iterator it = gsi_last (*seq_p);
1229 bool before = false;
1231 if (gsi_end_p (it))
1232 before = true;
1234 asan_poison_variable (decl, poison, &it, before);
1237 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1239 static int
1240 sort_by_decl_uid (const void *a, const void *b)
1242 const tree *t1 = (const tree *)a;
1243 const tree *t2 = (const tree *)b;
1245 int uid1 = DECL_UID (*t1);
1246 int uid2 = DECL_UID (*t2);
1248 if (uid1 < uid2)
1249 return -1;
1250 else if (uid1 > uid2)
1251 return 1;
1252 else
1253 return 0;
1256 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1257 depending on POISON flag. Created statement is appended
1258 to SEQ_P gimple sequence. */
1260 static void
1261 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1263 unsigned c = variables->elements ();
1264 if (c == 0)
1265 return;
/* Copy the set into a vector so the variables can be sorted.  */
1267 auto_vec<tree> sorted_variables (c);
1269 for (hash_set<tree>::iterator it = variables->begin ();
1270 it != variables->end (); ++it)
1271 sorted_variables.safe_push (*it);
/* Process in DECL_UID order so the emitted calls come out in a
   deterministic order.  */
1273 sorted_variables.qsort (sort_by_decl_uid);
1275 unsigned i;
1276 tree var;
1277 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1279 asan_poison_variable (var, poison, seq_p);
1281 /* Add use_after_scope_memory attribute for the variable in order
1282 to prevent re-written into SSA. */
1283 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1284 DECL_ATTRIBUTES (var)))
1285 DECL_ATTRIBUTES (var)
1286 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1287 integer_one_node,
1288 DECL_ATTRIBUTES (var));
1292 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
/* Lowers *EXPR_P (a BIND_EXPR) into a GIMPLE_BIND appended to PRE_P,
   adding stack save/restore, end-of-scope clobbers, ASAN poisoning and
   OpenACC 'declare' returns to a cleanup sequence as needed.  Returns
   GS_OK (with *EXPR_P set to the value temporary) when the bind had a
   value, GS_ALL_DONE otherwise.  */
1294 static enum gimplify_status
1295 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1297 tree bind_expr = *expr_p;
1298 bool old_keep_stack = gimplify_ctxp->keep_stack;
1299 bool old_save_stack = gimplify_ctxp->save_stack;
1300 tree t;
1301 gbind *bind_stmt;
1302 gimple_seq body, cleanup;
1303 gcall *stack_save;
1304 location_t start_locus = 0, end_locus = 0;
1305 tree ret_clauses = NULL;
1307 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1309 /* Mark variables seen in this bind expr. */
1310 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1312 if (VAR_P (t))
1314 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1316 /* Mark variable as local. */
1317 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1318 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1319 || splay_tree_lookup (ctx->variables,
1320 (splay_tree_key) t) == NULL))
1322 if (ctx->region_type == ORT_SIMD
1323 && TREE_ADDRESSABLE (t)
1324 && !TREE_STATIC (t))
1325 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1326 else
1327 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1330 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1332 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1333 cfun->has_local_explicit_reg_vars = true;
1336 /* Preliminarily mark non-addressed complex variables as eligible
1337 for promotion to gimple registers. We'll transform their uses
1338 as we find them. */
1339 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1340 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1341 && !TREE_THIS_VOLATILE (t)
1342 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1343 && !needs_to_live_in_memory (t))
1344 DECL_GIMPLE_REG_P (t) = 1;
1347 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1348 BIND_EXPR_BLOCK (bind_expr));
1349 gimple_push_bind_expr (bind_stmt);
/* Track save_stack/keep_stack for just this bind's body; the outer
   values are restored below.  */
1351 gimplify_ctxp->keep_stack = false;
1352 gimplify_ctxp->save_stack = false;
1354 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1355 body = NULL;
1356 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1357 gimple_bind_set_body (bind_stmt, body);
1359 /* Source location wise, the cleanup code (stack_restore and clobbers)
1360 belongs to the end of the block, so propagate what we have. The
1361 stack_save operation belongs to the beginning of block, which we can
1362 infer from the bind_expr directly if the block has no explicit
1363 assignment. */
1364 if (BIND_EXPR_BLOCK (bind_expr))
1366 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1367 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1369 if (start_locus == 0)
1370 start_locus = EXPR_LOCATION (bind_expr);
1372 cleanup = NULL;
1373 stack_save = NULL;
1375 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1376 the stack space allocated to the VLAs. */
1377 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1379 gcall *stack_restore;
1381 /* Save stack on entry and restore it on exit. Add a try_finally
1382 block to achieve this. */
1383 build_stack_save_restore (&stack_save, &stack_restore);
1385 gimple_set_location (stack_save, start_locus);
1386 gimple_set_location (stack_restore, end_locus);
1388 gimplify_seq_add_stmt (&cleanup, stack_restore);
1391 /* Add clobbers for all variables that go out of scope. */
1392 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1394 if (VAR_P (t)
1395 && !is_global_var (t)
1396 && DECL_CONTEXT (t) == current_function_decl)
1398 if (!DECL_HARD_REGISTER (t)
1399 && !TREE_THIS_VOLATILE (t)
1400 && !DECL_HAS_VALUE_EXPR_P (t)
1401 /* Only care for variables that have to be in memory. Others
1402 will be rewritten into SSA names, hence moved to the
1403 top-level. */
1404 && !is_gimple_reg (t)
1405 && flag_stack_reuse != SR_NONE)
1407 tree clobber = build_clobber (TREE_TYPE (t));
1408 gimple *clobber_stmt;
1409 clobber_stmt = gimple_build_assign (t, clobber);
1410 gimple_set_location (clobber_stmt, end_locus);
1411 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
/* Collect OpenACC 'declare' return clauses recorded for this
   variable; they are emitted as one OACC_DECLARE below.  */
1414 if (flag_openacc && oacc_declare_returns != NULL)
1416 tree *c = oacc_declare_returns->get (t);
1417 if (c != NULL)
1419 if (ret_clauses)
1420 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1422 ret_clauses = *c;
1424 oacc_declare_returns->remove (t);
1426 if (oacc_declare_returns->elements () == 0)
1428 delete oacc_declare_returns;
1429 oacc_declare_returns = NULL;
/* Poison the variable's shadow memory at scope exit (use-after-scope
   detection); the call goes into the cleanup sequence.  */
1435 if (asan_poisoned_variables != NULL
1436 && asan_poisoned_variables->contains (t))
1438 asan_poisoned_variables->remove (t);
1439 asan_poison_variable (t, true, &cleanup);
1442 if (gimplify_ctxp->live_switch_vars != NULL
1443 && gimplify_ctxp->live_switch_vars->contains (t))
1444 gimplify_ctxp->live_switch_vars->remove (t);
1447 if (ret_clauses)
1449 gomp_target *stmt;
1450 gimple_stmt_iterator si = gsi_start (cleanup);
1452 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1453 ret_clauses);
1454 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1457 if (cleanup)
1459 gtry *gs;
1460 gimple_seq new_body;
1462 new_body = NULL;
1463 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1464 GIMPLE_TRY_FINALLY);
1466 if (stack_save)
1467 gimplify_seq_add_stmt (&new_body, stack_save);
1468 gimplify_seq_add_stmt (&new_body, gs);
1469 gimple_bind_set_body (bind_stmt, new_body);
1472 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1473 if (!gimplify_ctxp->keep_stack)
1474 gimplify_ctxp->keep_stack = old_keep_stack;
1475 gimplify_ctxp->save_stack = old_save_stack;
1477 gimple_pop_bind_expr ();
1479 gimplify_seq_add_stmt (pre_p, bind_stmt);
1481 if (temp)
1483 *expr_p = temp;
1484 return GS_OK;
1487 *expr_p = NULL_TREE;
1488 return GS_ALL_DONE;
1491 /* Maybe add early return predict statement to PRE_P sequence. */
1493 static void
1494 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1496 /* If we are not in a conditional context, add PREDICT statement. */
1497 if (gimple_conditional_context ())
1499 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1500 NOT_TAKEN);
1501 gimplify_seq_add_stmt (pre_p, predict);
1505 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1506 GIMPLE value, it is assigned to a new temporary and the statement is
1507 re-written to return the temporary.
1509 PRE_P points to the sequence where side effects that must happen before
1510 STMT should be stored. */
1512 static enum gimplify_status
1513 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1515 greturn *ret;
1516 tree ret_expr = TREE_OPERAND (stmt, 0);
1517 tree result_decl, result;
1519 if (ret_expr == error_mark_node)
1520 return GS_ERROR;
/* A bare "return;" or "return <RESULT_DECL>;" needs no rewriting.  */
1522 if (!ret_expr
1523 || TREE_CODE (ret_expr) == RESULT_DECL)
1525 maybe_add_early_return_predict_stmt (pre_p);
1526 greturn *ret = gimple_build_return (ret_expr);
1527 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1528 gimplify_seq_add_stmt (pre_p, ret);
1529 return GS_ALL_DONE;
1532 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1533 result_decl = NULL_TREE;
1534 else
/* RET_EXPR here is a MODIFY/INIT_EXPR assigning into the RESULT_DECL;
   extract that decl.  */
1536 result_decl = TREE_OPERAND (ret_expr, 0);
1538 /* See through a return by reference. */
1539 if (TREE_CODE (result_decl) == INDIRECT_REF)
1540 result_decl = TREE_OPERAND (result_decl, 0);
1542 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1543 || TREE_CODE (ret_expr) == INIT_EXPR)
1544 && TREE_CODE (result_decl) == RESULT_DECL);
1547 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1548 Recall that aggregate_value_p is FALSE for any aggregate type that is
1549 returned in registers. If we're returning values in registers, then
1550 we don't want to extend the lifetime of the RESULT_DECL, particularly
1551 across another call. In addition, for those aggregates for which
1552 hard_function_value generates a PARALLEL, we'll die during normal
1553 expansion of structure assignments; there's special code in expand_return
1554 to handle this case that does not exist in expand_expr. */
1555 if (!result_decl)
1556 result = NULL_TREE;
1557 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1559 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1561 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1562 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1563 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1564 should be effectively allocated by the caller, i.e. all calls to
1565 this function must be subject to the Return Slot Optimization. */
1566 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1567 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1569 result = result_decl;
/* One shared return temporary per function; reuse it if it exists.  */
1571 else if (gimplify_ctxp->return_temp)
1572 result = gimplify_ctxp->return_temp;
1573 else
1575 result = create_tmp_reg (TREE_TYPE (result_decl));
1577 /* ??? With complex control flow (usually involving abnormal edges),
1578 we can wind up warning about an uninitialized value for this. Due
1579 to how this variable is constructed and initialized, this is never
1580 true. Give up and never warn. */
1581 TREE_NO_WARNING (result) = 1;
1583 gimplify_ctxp->return_temp = result;
1586 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1587 Then gimplify the whole thing. */
1588 if (result != result_decl)
1589 TREE_OPERAND (ret_expr, 0) = result;
1591 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1593 maybe_add_early_return_predict_stmt (pre_p);
1594 ret = gimple_build_return (result);
1595 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1596 gimplify_seq_add_stmt (pre_p, ret);
1598 return GS_ALL_DONE;
1601 /* Gimplify a variable-length array DECL. */
/* Turns DECL into an indirection through a pointer temporary that is
   assigned from an alloca-style call appended to SEQ_P.  */
1603 static void
1604 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1606 /* This is a variable-sized decl. Simplify its size and mark it
1607 for deferred expansion. */
1608 tree t, addr, ptr_type;
1610 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1611 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1613 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1614 if (DECL_HAS_VALUE_EXPR_P (decl))
1615 return;
1617 /* All occurrences of this decl in final gimplified code will be
1618 replaced by indirection. Setting DECL_VALUE_EXPR does two
1619 things: First, it lets the rest of the gimplifier know what
1620 replacement to use. Second, it lets the debug info know
1621 where to find the value. */
1622 ptr_type = build_pointer_type (TREE_TYPE (decl));
1623 addr = create_tmp_var (ptr_type, get_name (decl));
/* Keep ADDR visible to debug info (see comment above).  */
1624 DECL_IGNORED_P (addr) = 0;
1625 t = build_fold_indirect_ref (addr);
1626 TREE_THIS_NOTRAP (t) = 1;
1627 SET_DECL_VALUE_EXPR (decl, t);
1628 DECL_HAS_VALUE_EXPR_P (decl) = 1;
/* addr = (ptr_type) __builtin_alloca_with_align (size, align);  */
1630 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1631 max_int_size_in_bytes (TREE_TYPE (decl)));
1632 /* The call has been built for a variable-sized object. */
1633 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1634 t = fold_convert (ptr_type, t);
1635 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1637 gimplify_and_add (t, seq_p);
1640 /* A helper function to be called via walk_tree. Mark all labels under *TP
1641 as being forced. To be called for DECL_INITIAL of static variables. */
1643 static tree
1644 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1646 if (TYPE_P (*tp))
1647 *walk_subtrees = 0;
1648 if (TREE_CODE (*tp) == LABEL_DECL)
1650 FORCED_LABEL (*tp) = 1;
1651 cfun->has_forced_label_in_static = 1;
1654 return NULL_TREE;
1657 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1658 and initialization explicit. */
1660 static enum gimplify_status
1661 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1663 tree stmt = *stmt_p;
1664 tree decl = DECL_EXPR_DECL (stmt);
1666 *stmt_p = NULL_TREE;
1668 if (TREE_TYPE (decl) == error_mark_node)
1669 return GS_ERROR;
/* Gimplify the (possibly variable) sizes of the declared type.  */
1671 if ((TREE_CODE (decl) == TYPE_DECL
1672 || VAR_P (decl))
1673 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1675 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1676 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1677 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1680 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1681 in case its size expressions contain problematic nodes like CALL_EXPR. */
1682 if (TREE_CODE (decl) == TYPE_DECL
1683 && DECL_ORIGINAL_TYPE (decl)
1684 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1686 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1687 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1688 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1691 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1693 tree init = DECL_INITIAL (decl);
1694 bool is_vla = false;
/* Variable-sized decls (and very large ones under generic stack
   checking) are allocated dynamically via gimplify_vla_decl.  */
1696 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1697 || (!TREE_STATIC (decl)
1698 && flag_stack_check == GENERIC_STACK_CHECK
1699 && compare_tree_int (DECL_SIZE_UNIT (decl),
1700 STACK_CHECK_MAX_VAR_SIZE) > 0))
1702 gimplify_vla_decl (decl, seq_p);
1703 is_vla = true;
/* Register the variable for ASAN use-after-scope checking: unpoison
   it here at its declaration point; the matching poisoning happens
   at scope exit in gimplify_bind_expr.  */
1706 if (asan_poisoned_variables
1707 && !is_vla
1708 && TREE_ADDRESSABLE (decl)
1709 && !TREE_STATIC (decl)
1710 && !DECL_HAS_VALUE_EXPR_P (decl)
1711 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1712 && dbg_cnt (asan_use_after_scope)
1713 && !gimplify_omp_ctxp)
1715 asan_poisoned_variables->add (decl);
1716 asan_poison_variable (decl, false, seq_p);
1717 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1718 gimplify_ctxp->live_switch_vars->add (decl);
1721 /* Some front ends do not explicitly declare all anonymous
1722 artificial variables. We compensate here by declaring the
1723 variables, though it would be better if the front ends would
1724 explicitly declare them. */
1725 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1726 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1727 gimple_add_tmp_var (decl);
1729 if (init && init != error_mark_node)
1731 if (!TREE_STATIC (decl))
1733 DECL_INITIAL (decl) = NULL_TREE;
1734 init = build2 (INIT_EXPR, void_type_node, decl, init);
1735 gimplify_and_add (init, seq_p);
1736 ggc_free (init);
1738 else
1739 /* We must still examine initializers for static variables
1740 as they may contain a label address. */
1741 walk_tree (&init, force_labels_r, NULL, NULL);
1745 return GS_ALL_DONE;
1748 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1749 and replacing the LOOP_EXPR with goto, but if the loop contains an
1750 EXIT_EXPR, we need to append a label for it to jump to. */
1752 static enum gimplify_status
1753 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1755 tree saved_label = gimplify_ctxp->exit_label;
1756 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1758 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1760 gimplify_ctxp->exit_label = NULL_TREE;
1762 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1764 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1766 if (gimplify_ctxp->exit_label)
1767 gimplify_seq_add_stmt (pre_p,
1768 gimple_build_label (gimplify_ctxp->exit_label));
1770 gimplify_ctxp->exit_label = saved_label;
1772 *expr_p = NULL;
1773 return GS_ALL_DONE;
1776 /* Gimplify a statement list onto a sequence. These may be created either
1777 by an enlightened front-end, or by shortcut_cond_expr. */
1779 static enum gimplify_status
1780 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1782 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1784 tree_stmt_iterator i = tsi_start (*expr_p);
1786 while (!tsi_end_p (i))
1788 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1789 tsi_delink (&i);
1792 if (temp)
1794 *expr_p = temp;
1795 return GS_OK;
1798 return GS_ALL_DONE;
1801 /* Callback for walk_gimple_seq. */
/* Records in WI->INFO the first "real" statement found (one that is not
   a lexical scope, debug stmt, or ASAN_MARK call) and returns non-NULL
   to stop the walk there.  */
1803 static tree
1804 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1805 struct walk_stmt_info *wi)
1807 gimple *stmt = gsi_stmt (*gsi_p);
1809 *handled_ops_p = true;
1810 switch (gimple_code (stmt))
1812 case GIMPLE_TRY:
1813 /* A compiler-generated cleanup or a user-written try block.
1814 If it's empty, don't dive into it--that would result in
1815 worse location info. */
1816 if (gimple_try_eval (stmt) == NULL)
1818 wi->info = stmt;
1819 return integer_zero_node;
1821 /* Fall through. */
1822 case GIMPLE_BIND:
1823 case GIMPLE_CATCH:
1824 case GIMPLE_EH_FILTER:
1825 case GIMPLE_TRANSACTION:
1826 /* Walk the sub-statements. */
1827 *handled_ops_p = false;
1828 break;
1830 case GIMPLE_DEBUG:
1831 /* Ignore these. We may generate them before declarations that
1832 are never executed. If there's something to warn about,
1833 there will be non-debug stmts too, and we'll catch those. */
1834 break;
1836 case GIMPLE_CALL:
/* ASAN_MARK calls are instrumentation, not user code; walk into
   their operands instead of reporting them.  */
1837 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1839 *handled_ops_p = false;
1840 break;
1842 /* Fall through. */
1843 default:
1844 /* Save the first "real" statement (not a decl/lexical scope/...). */
1845 wi->info = stmt;
1846 return integer_zero_node;
1848 return NULL_TREE;
1851 /* Possibly warn about unreachable statements between switch's controlling
1852 expression and the first case. SEQ is the body of a switch expression. */
1854 static void
1855 maybe_warn_switch_unreachable (gimple_seq seq)
1857 if (!warn_switch_unreachable
1858 /* This warning doesn't play well with Fortran when optimizations
1859 are on. */
1860 || lang_GNU_Fortran ()
1861 || seq == NULL)
1862 return;
1864 struct walk_stmt_info wi;
1865 memset (&wi, 0, sizeof (wi));
1866 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1867 gimple *stmt = (gimple *) wi.info;
1869 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1871 if (gimple_code (stmt) == GIMPLE_GOTO
1872 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1873 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1874 /* Don't warn for compiler-generated gotos. These occur
1875 in Duff's devices, for example. */;
1876 else
1877 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1878 "statement will never be executed");
1883 /* A label entry that pairs label and a location. */
1884 struct label_entry
/* The LABEL_DECL in question.  */
1886 tree label;
/* Location to report diagnostics at for this label (e.g. the location
   of the GIMPLE_COND that branches to it).  */
1887 location_t loc;
1890 /* Find LABEL in vector of label entries VEC. */
1892 static struct label_entry *
1893 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1895 unsigned int i;
1896 struct label_entry *l;
1898 FOR_EACH_VEC_ELT (*vec, i, l)
1899 if (l->label == label)
1900 return l;
1901 return NULL;
1904 /* Return true if LABEL, a LABEL_DECL, represents a case label
1905 in a vector of labels CASES. */
1907 static bool
1908 case_label_p (const vec<tree> *cases, tree label)
1910 unsigned int i;
1911 tree l;
1913 FOR_EACH_VEC_ELT (*cases, i, l)
1914 if (CASE_LABEL (l) == label)
1915 return true;
1916 return false;
1919 /* Find the last nondebug statement in a scope STMT. */
/* Recurses through GIMPLE_BIND and GIMPLE_TRY wrappers; any other
   statement is its own "last statement".  May return NULL for an
   empty scope.  */
1921 static gimple *
1922 last_stmt_in_scope (gimple *stmt)
1924 if (!stmt)
1925 return NULL;
1927 switch (gimple_code (stmt))
1929 case GIMPLE_BIND:
1931 gbind *bind = as_a <gbind *> (stmt);
1932 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
1933 return last_stmt_in_scope (stmt);
1936 case GIMPLE_TRY:
1938 gtry *try_stmt = as_a <gtry *> (stmt);
1939 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
1940 gimple *last_eval = last_stmt_in_scope (stmt);
/* NOTE(review): last_eval may be NULL (empty try body), yet
   gimple_stmt_may_fallthru is evaluated before the NULL check in
   the next disjunct — confirm it tolerates a NULL argument.  */
1941 if (gimple_stmt_may_fallthru (last_eval)
1942 && (last_eval == NULL
1943 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1944 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
/* For a fallthru try/finally, the last statement of the cleanup
   is the one that executes last.  */
1946 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
1947 return last_stmt_in_scope (stmt);
1949 else
1950 return last_eval;
1953 case GIMPLE_DEBUG:
1954 gcc_unreachable ();
1956 default:
1957 return stmt;
1961 /* Collect interesting labels in LABELS and return the statement preceding
1962 another case label, or a user-defined label. Store a location useful
1963 to give warnings at *PREVLOC (usually the location of the returned
1964 statement or of its surrounding scope). */
1966 static gimple *
1967 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1968 auto_vec <struct label_entry> *labels,
1969 location_t *prevloc)
1971 gimple *prev = NULL;
1973 *prevloc = UNKNOWN_LOCATION;
/* Walk forward one statement at a time until the while-condition at the
   bottom finds a case or user-defined label.  */
1976 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
1978 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
1979 which starts on a GIMPLE_SWITCH and ends with a break label.
1980 Handle that as a single statement that can fall through. */
1981 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
1982 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
1983 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
1984 if (last
1985 && gimple_code (first) == GIMPLE_SWITCH
1986 && gimple_code (last) == GIMPLE_LABEL)
1988 tree label = gimple_label_label (as_a <glabel *> (last));
1989 if (SWITCH_BREAK_LABEL_P (label))
1991 prev = bind;
1992 gsi_next (gsi_p);
1993 continue;
1997 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1998 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2000 /* Nested scope. Only look at the last statement of
2001 the innermost scope. */
2002 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2003 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2004 if (last)
2006 prev = last;
2007 /* It might be a label without a location. Use the
2008 location of the scope then. */
2009 if (!gimple_has_location (prev))
2010 *prevloc = bind_loc;
2012 gsi_next (gsi_p);
2013 continue;
2016 /* Ifs are tricky. */
2017 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2019 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2020 tree false_lab = gimple_cond_false_label (cond_stmt);
2021 location_t if_loc = gimple_location (cond_stmt);
2023 /* If we have e.g.
2024 if (i > 1) goto <D.2259>; else goto D;
2025 we can't do much with the else-branch. */
2026 if (!DECL_ARTIFICIAL (false_lab))
2027 break;
2029 /* Go on until the false label, then one step back. */
2030 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2032 gimple *stmt = gsi_stmt (*gsi_p);
2033 if (gimple_code (stmt) == GIMPLE_LABEL
2034 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2035 break;
2038 /* Not found? Oops. */
2039 if (gsi_end_p (*gsi_p))
2040 break;
/* The false label is a potential fallthrough target; record it with
   the location of the controlling if.  */
2042 struct label_entry l = { false_lab, if_loc };
2043 labels->safe_push (l);
2045 /* Go to the last statement of the then branch. */
2046 gsi_prev (gsi_p);
2048 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2049 <D.1759>:
2050 <stmt>;
2051 goto <D.1761>;
2052 <D.1760>:
2054 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2055 && !gimple_has_location (gsi_stmt (*gsi_p)))
2057 /* Look at the statement before, it might be
2058 attribute fallthrough, in which case don't warn. */
2059 gsi_prev (gsi_p);
2060 bool fallthru_before_dest
2061 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2062 gsi_next (gsi_p);
2063 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2064 if (!fallthru_before_dest)
2066 struct label_entry l = { goto_dest, if_loc };
2067 labels->safe_push (l);
2070 /* And move back. */
2071 gsi_next (gsi_p);
2074 /* Remember the last statement. Skip labels that are of no interest
2075 to us. */
2076 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2078 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2079 if (find_label_entry (labels, label))
2080 prev = gsi_stmt (*gsi_p);
2082 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2084 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2085 prev = gsi_stmt (*gsi_p);
2086 gsi_next (gsi_p);
2088 while (!gsi_end_p (*gsi_p)
2089 /* Stop if we find a case or a user-defined label. */
2090 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2091 || !gimple_has_location (gsi_stmt (*gsi_p))));
2093 if (prev && gimple_has_location (prev))
2094 *prevloc = gimple_location (prev);
2095 return prev;
2098 /* Return true if the switch fallthough warning should occur. LABEL is
2099 the label statement that we're falling through to. */
/* GSI_P points at the statement iterator positioned on LABEL's stmt;
   it is only peeked at through local copies, never advanced.  */
2101 static bool
2102 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2104 gimple_stmt_iterator gsi = *gsi_p;
2106 /* Don't warn if the label is marked with a "falls through" comment. */
2107 if (FALLTHROUGH_LABEL_P (label))
2108 return false;
2110 /* Don't warn for non-case labels followed by a statement:
2111 case 0:
2112 foo ();
2113 label:
2114 bar ();
2115 as these are likely intentional. */
2116 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2118 tree l;
2119 while (!gsi_end_p (gsi)
2120 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2121 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2122 && !case_label_p (&gimplify_ctxp->case_labels, l))
2123 gsi_next_nondebug (&gsi);
2124 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2125 return false;
2128 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2129 immediately breaks. */
2130 gsi = *gsi_p;
2132 /* Skip all immediately following labels. */
2133 while (!gsi_end_p (gsi)
2134 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2135 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2136 gsi_next_nondebug (&gsi);
2138 /* { ... something; default:; } */
2139 if (gsi_end_p (gsi)
2140 /* { ... something; default: break; } or
2141 { ... something; default: goto L; } */
2142 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2143 /* { ... something; default: return; } */
2144 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2145 return false;
2147 return true;
2150 /* Callback for walk_gimple_seq. */
/* Looks for label / may-fallthru-stmt / label sequences and issues
   -Wimplicit-fallthrough diagnostics for them.  */
2152 static tree
2153 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2154 struct walk_stmt_info *)
2156 gimple *stmt = gsi_stmt (*gsi_p);
2158 *handled_ops_p = true;
2159 switch (gimple_code (stmt))
2161 case GIMPLE_TRY:
2162 case GIMPLE_BIND:
2163 case GIMPLE_CATCH:
2164 case GIMPLE_EH_FILTER:
2165 case GIMPLE_TRANSACTION:
2166 /* Walk the sub-statements. */
2167 *handled_ops_p = false;
2168 break;
2170 /* Find a sequence of form:
2172 GIMPLE_LABEL
2173 [...]
2174 <may fallthru stmt>
2175 GIMPLE_LABEL
2177 and possibly warn. */
2178 case GIMPLE_LABEL:
2180 /* Found a label. Skip all immediately following labels. */
2181 while (!gsi_end_p (*gsi_p)
2182 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2183 gsi_next_nondebug (gsi_p);
2185 /* There might be no more statements. */
2186 if (gsi_end_p (*gsi_p))
2187 return integer_zero_node;
2189 /* Vector of labels that fall through. */
2190 auto_vec <struct label_entry> labels;
2191 location_t prevloc;
2192 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2194 /* There might be no more statements. */
2195 if (gsi_end_p (*gsi_p))
2196 return integer_zero_node;
2198 gimple *next = gsi_stmt (*gsi_p);
2199 tree label;
2200 /* If what follows is a label, then we may have a fallthrough. */
2201 if (gimple_code (next) == GIMPLE_LABEL
2202 && gimple_has_location (next)
2203 && (label = gimple_label_label (as_a <glabel *> (next)))
2204 && prev != NULL)
2206 struct label_entry *l;
2207 bool warned_p = false;
2208 auto_diagnostic_group d;
2209 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2210 /* Quiet. */;
2211 else if (gimple_code (prev) == GIMPLE_LABEL
2212 && (label = gimple_label_label (as_a <glabel *> (prev)))
2213 && (l = find_label_entry (&labels, label)))
2214 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2215 "this statement may fall through");
2216 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2217 /* Try to be clever and don't warn when the statement
2218 can't actually fall through. */
2219 && gimple_stmt_may_fallthru (prev)
2220 && prevloc != UNKNOWN_LOCATION)
2221 warned_p = warning_at (prevloc,
2222 OPT_Wimplicit_fallthrough_,
2223 "this statement may fall through");
2224 if (warned_p)
2225 inform (gimple_location (next), "here");
2227 /* Mark this label as processed so as to prevent multiple
2228 warnings in nested switches. */
2229 FALLTHROUGH_LABEL_P (label) = true;
2231 /* So that next warn_implicit_fallthrough_r will start looking for
2232 a new sequence starting with this label. */
2233 gsi_prev (gsi_p);
2236 break;
2237 default:
2238 break;
2240 return NULL_TREE;
2243 /* Warn when a switch case falls through. */
2245 static void
2246 maybe_warn_implicit_fallthrough (gimple_seq seq)
2248 if (!warn_implicit_fallthrough)
2249 return;
2251 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2252 if (!(lang_GNU_C ()
2253 || lang_GNU_CXX ()
2254 || lang_GNU_OBJC ()))
2255 return;
2257 struct walk_stmt_info wi;
2258 memset (&wi, 0, sizeof (wi));
2259 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2262 /* Callback for walk_gimple_seq. */
/* Remove each IFN_FALLTHROUGH internal call from the sequence and verify
   that what follows it (possibly through an artificial goto) is a case or
   default label; otherwise warn.  Returning integer_zero_node signals via
   WI->info the location of a [[fallthrough]] at the very end of the
   sequence, which expand_FALLTHROUGH diagnoses.  */
2264 static tree
2265 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2266 struct walk_stmt_info *wi)
2268 gimple *stmt = gsi_stmt (*gsi_p);
2270 *handled_ops_p = true;
2271 switch (gimple_code (stmt))
2273 case GIMPLE_TRY:
2274 case GIMPLE_BIND:
2275 case GIMPLE_CATCH:
2276 case GIMPLE_EH_FILTER:
2277 case GIMPLE_TRANSACTION:
2278 /* Walk the sub-statements. */
2279 *handled_ops_p = false;
2280 break;
2281 case GIMPLE_CALL:
2282 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
/* The marker call itself never survives into the IL.  */
2284 gsi_remove (gsi_p, true);
2285 if (gsi_end_p (*gsi_p))
/* [[fallthrough]] was the last statement: report its location back to
   the caller through WI->info and stop the walk.  */
2287 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2288 return integer_zero_node;
2291 bool found = false;
2292 location_t loc = gimple_location (stmt);
2294 gimple_stmt_iterator gsi2 = *gsi_p;
2295 stmt = gsi_stmt (gsi2);
2296 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2298 /* Go on until the artificial label. */
2299 tree goto_dest = gimple_goto_dest (stmt);
2300 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2302 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2303 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2304 == goto_dest)
2305 break;
2308 /* Not found? Stop. */
2309 if (gsi_end_p (gsi2))
2310 break;
2312 /* Look one past it. */
2313 gsi_next (&gsi2);
2316 /* We're looking for a case label or default label here. */
2317 while (!gsi_end_p (gsi2))
2319 stmt = gsi_stmt (gsi2);
2320 if (gimple_code (stmt) == GIMPLE_LABEL)
2322 tree label = gimple_label_label (as_a <glabel *> (stmt));
2323 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2325 found = true;
2326 break;
/* ASAN_MARK calls and debug statements are transparent here and are
   simply skipped over.  */
2329 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2331 else if (!is_gimple_debug (stmt))
2332 /* Anything else is not expected. */
2333 break;
2334 gsi_next (&gsi2);
2336 if (!found)
2337 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2338 "a case label or default label");
2340 break;
2341 default:
2342 break;
2344 return NULL_TREE;
2347 /* Expand all FALLTHROUGH () calls in SEQ. */
2349 static void
2350 expand_FALLTHROUGH (gimple_seq *seq_p)
2352 struct walk_stmt_info wi;
2353 location_t loc;
2354 memset (&wi, 0, sizeof (wi));
2355 wi.info = (void *) &loc;
2356 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2357 if (wi.callback_result == integer_zero_node)
2358 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2359 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2360 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2361 "a case label or default label");
2365 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2366 branch to. */
/* Returns GS_ERROR/GS_UNHANDLED if the condition fails to gimplify,
   GS_ALL_DONE otherwise.  The body is gimplified into its own sequence
   so fallthrough/unreachable warnings can inspect it before it is
   appended after the GIMPLE_SWITCH.  */
2368 static enum gimplify_status
2369 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2371 tree switch_expr = *expr_p;
2372 gimple_seq switch_body_seq = NULL;
2373 enum gimplify_status ret;
2374 tree index_type = TREE_TYPE (switch_expr);
2375 if (index_type == NULL_TREE)
2376 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2378 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2379 fb_rvalue);
2380 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2381 return ret;
2383 if (SWITCH_BODY (switch_expr))
2385 vec<tree> labels;
2386 vec<tree> saved_labels;
2387 hash_set<tree> *saved_live_switch_vars = NULL;
2388 tree default_case = NULL_TREE;
2389 gswitch *switch_stmt;
2391 /* Save old labels, get new ones from body, then restore the old
2392 labels. Save all the things from the switch body to append after. */
2393 saved_labels = gimplify_ctxp->case_labels;
2394 gimplify_ctxp->case_labels.create (8);
2396 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2397 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2398 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2399 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2400 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2401 else
2402 gimplify_ctxp->live_switch_vars = NULL;
2404 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2405 gimplify_ctxp->in_switch_expr = true;
2407 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2409 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2410 maybe_warn_switch_unreachable (switch_body_seq);
2411 maybe_warn_implicit_fallthrough (switch_body_seq);
2412 /* Only do this for the outermost GIMPLE_SWITCH. */
2413 if (!gimplify_ctxp->in_switch_expr)
2414 expand_FALLTHROUGH (&switch_body_seq);
2416 labels = gimplify_ctxp->case_labels;
2417 gimplify_ctxp->case_labels = saved_labels;
2419 if (gimplify_ctxp->live_switch_vars)
/* All live switch vars must have been cleared when their scopes ended.  */
2421 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2422 delete gimplify_ctxp->live_switch_vars;
2424 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2426 preprocess_case_label_vec_for_gimple (labels, index_type,
2427 &default_case);
2429 bool add_bind = false;
2430 if (!default_case)
2432 glabel *new_default;
2434 default_case
2435 = build_case_label (NULL_TREE, NULL_TREE,
2436 create_artificial_label (UNKNOWN_LOCATION));
2437 if (old_in_switch_expr)
2439 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2440 add_bind = true;
2442 new_default = gimple_build_label (CASE_LABEL (default_case));
2443 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2445 else if (old_in_switch_expr)
2447 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2448 if (last && gimple_code (last) == GIMPLE_LABEL)
2450 tree label = gimple_label_label (as_a <glabel *> (last));
2451 if (SWITCH_BREAK_LABEL_P (label))
2452 add_bind = true;
2456 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2457 default_case, labels);
2458 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2459 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2460 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2461 so that we can easily find the start and end of the switch
2462 statement. */
2463 if (add_bind)
2465 gimple_seq bind_body = NULL;
2466 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2467 gimple_seq_add_seq (&bind_body, switch_body_seq);
2468 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2469 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2470 gimplify_seq_add_stmt (pre_p, bind);
2472 else
2474 gimplify_seq_add_stmt (pre_p, switch_stmt);
2475 gimplify_seq_add_seq (pre_p, switch_body_seq);
2477 labels.release ();
2479 else
2480 gcc_unreachable ();
2482 return GS_ALL_DONE;
2485 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2487 static enum gimplify_status
2488 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2490 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2491 == current_function_decl);
2493 tree label = LABEL_EXPR_LABEL (*expr_p);
2494 glabel *label_stmt = gimple_build_label (label);
2495 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2496 gimplify_seq_add_stmt (pre_p, label_stmt);
2498 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2499 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2500 NOT_TAKEN));
2501 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2502 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2503 TAKEN));
2505 return GS_ALL_DONE;
2508 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2510 static enum gimplify_status
2511 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2513 struct gimplify_ctx *ctxp;
2514 glabel *label_stmt;
2516 /* Invalid programs can play Duff's Device type games with, for example,
2517 #pragma omp parallel. At least in the C front end, we don't
2518 detect such invalid branches until after gimplification, in the
2519 diagnose_omp_blocks pass. */
2520 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2521 if (ctxp->case_labels.exists ())
2522 break;
2524 tree label = CASE_LABEL (*expr_p);
2525 label_stmt = gimple_build_label (label);
2526 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2527 ctxp->case_labels.safe_push (*expr_p);
2528 gimplify_seq_add_stmt (pre_p, label_stmt);
2530 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2531 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2532 NOT_TAKEN));
2533 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2534 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2535 TAKEN));
2537 return GS_ALL_DONE;
2540 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2541 if necessary. */
2543 tree
2544 build_and_jump (tree *label_p)
2546 if (label_p == NULL)
2547 /* If there's nowhere to jump, just fall through. */
2548 return NULL_TREE;
2550 if (*label_p == NULL_TREE)
2552 tree label = create_artificial_label (UNKNOWN_LOCATION);
2553 *label_p = label;
2556 return build1 (GOTO_EXPR, void_type_node, *label_p);
2559 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2560 This also involves building a label to jump to and communicating it to
2561 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2563 static enum gimplify_status
2564 gimplify_exit_expr (tree *expr_p)
2566 tree cond = TREE_OPERAND (*expr_p, 0);
2567 tree expr;
2569 expr = build_and_jump (&gimplify_ctxp->exit_label);
2570 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2571 *expr_p = expr;
2573 return GS_OK;
2576 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2577 different from its canonical type, wrap the whole thing inside a
2578 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2579 type.
2581 The canonical type of a COMPONENT_REF is the type of the field being
2582 referenced--unless the field is a bit-field which can be read directly
2583 in a smaller mode, in which case the canonical type is the
2584 sign-appropriate type corresponding to that mode. */
2586 static void
2587 canonicalize_component_ref (tree *expr_p)
2589 tree expr = *expr_p;
2590 tree type;
2592 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
/* For integral results, get_unwidened may pick a narrower mode for a
   bit-field read; otherwise the canonical type is the field's type.  */
2594 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2595 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2596 else
2597 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2599 /* One could argue that all the stuff below is not necessary for
2600 the non-bitfield case and declare it a FE error if type
2601 adjustment would be needed. */
2602 if (TREE_TYPE (expr) != type)
2604 #ifdef ENABLE_TYPES_CHECKING
2605 tree old_type = TREE_TYPE (expr);
2606 #endif
2607 int type_quals;
2609 /* We need to preserve qualifiers and propagate them from
2610 operand 0. */
2611 type_quals = TYPE_QUALS (type)
2612 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2613 if (TYPE_QUALS (type) != type_quals)
2614 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2616 /* Set the type of the COMPONENT_REF to the underlying type. */
2617 TREE_TYPE (expr) = type;
2619 #ifdef ENABLE_TYPES_CHECKING
2620 /* It is now a FE error, if the conversion from the canonical
2621 type to the original expression type is not useless. */
2622 gcc_assert (useless_type_conversion_p (old_type, type));
2623 #endif
2627 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2628 to foo, embed that change in the ADDR_EXPR by converting
2629 T array[U];
2630 (T *)&array
2632 &array[L]
2633 where L is the lower bound. For simplicity, only do this for constant
2634 lower bound.
2635 The constraint is that the type of &array[L] is trivially convertible
2636 to T *. */
2638 static void
2639 canonicalize_addr_expr (tree *expr_p)
2641 tree expr = *expr_p;
2642 tree addr_expr = TREE_OPERAND (expr, 0);
2643 tree datype, ddatype, pddatype;
2645 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2646 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2647 || TREE_CODE (addr_expr) != ADDR_EXPR)
2648 return;
2650 /* The addr_expr type should be a pointer to an array. */
2651 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2652 if (TREE_CODE (datype) != ARRAY_TYPE)
2653 return;
2655 /* The pointer to element type shall be trivially convertible to
2656 the expression pointer type. */
2657 ddatype = TREE_TYPE (datype);
2658 pddatype = build_pointer_type (ddatype);
2659 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2660 pddatype))
2661 return;
2663 /* The lower bound and element sizes must be constant. */
2664 if (!TYPE_SIZE_UNIT (ddatype)
2665 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2666 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2667 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2668 return;
2670 /* All checks succeeded. Build a new node to merge the cast. */
2671 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2672 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2673 NULL_TREE, NULL_TREE);
2674 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2676 /* We can have stripped a required restrict qualifier above. */
2677 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2678 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2681 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2682 underneath as appropriate. */
2684 static enum gimplify_status
2685 gimplify_conversion (tree *expr_p)
2687 location_t loc = EXPR_LOCATION (*expr_p);
2688 gcc_assert (CONVERT_EXPR_P (*expr_p));
2690 /* Then strip away all but the outermost conversion. */
2691 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2693 /* And remove the outermost conversion if it's useless. */
2694 if (tree_ssa_useless_type_conversion (*expr_p))
2695 *expr_p = TREE_OPERAND (*expr_p, 0);
2697 /* If we still have a conversion at the toplevel,
2698 then canonicalize some constructs. */
2699 if (CONVERT_EXPR_P (*expr_p))
2701 tree sub = TREE_OPERAND (*expr_p, 0);
2703 /* If a NOP conversion is changing the type of a COMPONENT_REF
2704 expression, then canonicalize its type now in order to expose more
2705 redundant conversions. */
2706 if (TREE_CODE (sub) == COMPONENT_REF)
2707 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2709 /* If a NOP conversion is changing a pointer to array of foo
2710 to a pointer to foo, embed that change in the ADDR_EXPR. */
2711 else if (TREE_CODE (sub) == ADDR_EXPR)
2712 canonicalize_addr_expr (expr_p);
2715 /* If we have a conversion to a non-register type force the
2716 use of a VIEW_CONVERT_EXPR instead. */
2717 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2718 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2719 TREE_OPERAND (*expr_p, 0));
2721 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2722 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2723 TREE_SET_CODE (*expr_p, NOP_EXPR);
2725 return GS_OK;
2728 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2729 DECL_VALUE_EXPR, and it's worth re-examining things. */
2731 static enum gimplify_status
2732 gimplify_var_or_parm_decl (tree *expr_p)
2734 tree decl = *expr_p;
2736 /* ??? If this is a local variable, and it has not been seen in any
2737 outer BIND_EXPR, then it's probably the result of a duplicate
2738 declaration, for which we've already issued an error. It would
2739 be really nice if the front end wouldn't leak these at all.
2740 Currently the only known culprit is C++ destructors, as seen
2741 in g++.old-deja/g++.jason/binding.C. */
2742 if (VAR_P (decl)
2743 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2744 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2745 && decl_function_context (decl) == current_function_decl)
2747 gcc_assert (seen_error ());
2748 return GS_ERROR;
2751 /* When within an OMP context, notice uses of variables. */
2752 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2753 return GS_ALL_DONE;
2755 /* If the decl is an alias for another expression, substitute it now. */
2756 if (DECL_HAS_VALUE_EXPR_P (decl))
2758 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2759 return GS_OK;
2762 return GS_ALL_DONE;
2765 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
/* Only expression-like tree classes are handled; anything else trips the
   gcc_unreachable below.  The tcc_expression case deliberately falls
   through into the generic operand scan for codes without inherent side
   effects.  */
2767 static void
2768 recalculate_side_effects (tree t)
2770 enum tree_code code = TREE_CODE (t);
2771 int len = TREE_OPERAND_LENGTH (t);
2772 int i;
2774 switch (TREE_CODE_CLASS (code))
2776 case tcc_expression:
2777 switch (code)
2779 case INIT_EXPR:
2780 case MODIFY_EXPR:
2781 case VA_ARG_EXPR:
2782 case PREDECREMENT_EXPR:
2783 case PREINCREMENT_EXPR:
2784 case POSTDECREMENT_EXPR:
2785 case POSTINCREMENT_EXPR:
2786 /* All of these have side-effects, no matter what their
2787 operands are. */
2788 return;
2790 default:
2791 break;
2793 /* Fall through. */
2795 case tcc_comparison: /* a comparison expression */
2796 case tcc_unary: /* a unary arithmetic expression */
2797 case tcc_binary: /* a binary arithmetic expression */
2798 case tcc_reference: /* a reference */
2799 case tcc_vl_exp: /* a function call */
/* Start from the node's own volatility, then OR in each operand's
   side-effect flag.  */
2800 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2801 for (i = 0; i < len; ++i)
2803 tree op = TREE_OPERAND (t, i);
2804 if (op && TREE_SIDE_EFFECTS (op))
2805 TREE_SIDE_EFFECTS (t) = 1;
2807 break;
2809 case tcc_constant:
2810 /* No side-effects. */
2811 return;
2813 default:
2814 gcc_unreachable ();
2818 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2819 node *EXPR_P.
2821 compound_lval
2822 : min_lval '[' val ']'
2823 | min_lval '.' ID
2824 | compound_lval '[' val ']'
2825 | compound_lval '.' ID
2827 This is not part of the original SIMPLE definition, which separates
2828 array and member references, but it seems reasonable to handle them
2829 together. Also, this way we don't run into problems with union
2830 aliasing; gcc requires that for accesses through a union to alias, the
2831 union reference must be explicit, which was not always the case when we
2832 were splitting up array and member refs.
2834 PRE_P points to the sequence where side effects that must happen before
2835 *EXPR_P should be stored.
2837 POST_P points to the sequence where side effects that must happen after
2838 *EXPR_P should be stored. */
/* FALLBACK controls whether an rvalue or lvalue form is acceptable; the
   returned gimplify_status is the minimum over all sub-gimplifications.  */
2840 static enum gimplify_status
2841 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2842 fallback_t fallback)
2844 tree *p;
2845 enum gimplify_status ret = GS_ALL_DONE, tret;
2846 int i;
2847 location_t loc = EXPR_LOCATION (*expr_p);
2848 tree expr = *expr_p;
2850 /* Create a stack of the subexpressions so later we can walk them in
2851 order from inner to outer. */
2852 auto_vec<tree, 10> expr_stack;
2854 /* We can handle anything that get_inner_reference can deal with. */
2855 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2857 restart:
2858 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2859 if (TREE_CODE (*p) == INDIRECT_REF)
2860 *p = fold_indirect_ref_loc (loc, *p);
2862 if (handled_component_p (*p))
2864 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2865 additional COMPONENT_REFs. */
2866 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2867 && gimplify_var_or_parm_decl (p) == GS_OK)
2868 goto restart;
2869 else
2870 break;
2872 expr_stack.safe_push (*p);
2875 gcc_assert (expr_stack.length ());
2877 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2878 walked through and P points to the innermost expression.
2880 Java requires that we elaborated nodes in source order. That
2881 means we must gimplify the inner expression followed by each of
2882 the indices, in order. But we can't gimplify the inner
2883 expression until we deal with any variable bounds, sizes, or
2884 positions in order to deal with PLACEHOLDER_EXPRs.
2886 So we do this in three steps. First we deal with the annotations
2887 for any variables in the components, then we gimplify the base,
2888 then we gimplify any indices, from left to right. */
2889 for (i = expr_stack.length () - 1; i >= 0; i--)
2891 tree t = expr_stack[i];
2893 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2895 /* Gimplify the low bound and element type size and put them into
2896 the ARRAY_REF. If these values are set, they have already been
2897 gimplified. */
2898 if (TREE_OPERAND (t, 2) == NULL_TREE)
2900 tree low = unshare_expr (array_ref_low_bound (t));
/* A constant low bound stays implicit; only variable bounds are stored
   in operand 2 and gimplified.  */
2901 if (!is_gimple_min_invariant (low))
2903 TREE_OPERAND (t, 2) = low;
2904 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2905 post_p, is_gimple_reg,
2906 fb_rvalue);
2907 ret = MIN (ret, tret);
2910 else
2912 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2913 is_gimple_reg, fb_rvalue);
2914 ret = MIN (ret, tret);
2917 if (TREE_OPERAND (t, 3) == NULL_TREE)
2919 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2920 tree elmt_size = unshare_expr (array_ref_element_size (t));
2921 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2923 /* Divide the element size by the alignment of the element
2924 type (above). */
2925 elmt_size
2926 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2928 if (!is_gimple_min_invariant (elmt_size))
2930 TREE_OPERAND (t, 3) = elmt_size;
2931 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2932 post_p, is_gimple_reg,
2933 fb_rvalue);
2934 ret = MIN (ret, tret);
2937 else
2939 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2940 is_gimple_reg, fb_rvalue);
2941 ret = MIN (ret, tret);
2944 else if (TREE_CODE (t) == COMPONENT_REF)
2946 /* Set the field offset into T and gimplify it. */
2947 if (TREE_OPERAND (t, 2) == NULL_TREE)
2949 tree offset = unshare_expr (component_ref_field_offset (t));
2950 tree field = TREE_OPERAND (t, 1);
2951 tree factor
2952 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2954 /* Divide the offset by its alignment. */
2955 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2957 if (!is_gimple_min_invariant (offset))
2959 TREE_OPERAND (t, 2) = offset;
2960 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2961 post_p, is_gimple_reg,
2962 fb_rvalue);
2963 ret = MIN (ret, tret);
2966 else
2968 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2969 is_gimple_reg, fb_rvalue);
2970 ret = MIN (ret, tret);
2975 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2976 so as to match the min_lval predicate. Failure to do so may result
2977 in the creation of large aggregate temporaries. */
2978 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2979 fallback | fb_lvalue);
2980 ret = MIN (ret, tret);
2982 /* And finally, the indices and operands of ARRAY_REF. During this
2983 loop we also remove any useless conversions. */
2984 for (; expr_stack.length () > 0; )
2986 tree t = expr_stack.pop ();
2988 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2990 /* Gimplify the dimension. */
2991 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2993 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2994 is_gimple_val, fb_rvalue);
2995 ret = MIN (ret, tret);
2999 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3001 /* The innermost expression P may have originally had
3002 TREE_SIDE_EFFECTS set which would have caused all the outer
3003 expressions in *EXPR_P leading to P to also have had
3004 TREE_SIDE_EFFECTS set. */
3005 recalculate_side_effects (t);
3008 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3009 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3011 canonicalize_component_ref (expr_p);
3014 expr_stack.release ();
3016 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3018 return ret;
3021 /* Gimplify the self modifying expression pointed to by EXPR_P
3022 (++, --, +=, -=).
3024 PRE_P points to the list where side effects that must happen before
3025 *EXPR_P should be stored.
3027 POST_P points to the list where side effects that must happen after
3028 *EXPR_P should be stored.
3030 WANT_VALUE is nonzero iff we want to use the value of this expression
3031 in another expression.
3033 ARITH_TYPE is the type the computation should be performed in. */
/* Returns GS_ALL_DONE for the fully-expanded postfix case, GS_OK when the
   result is a MODIFY_EXPR still needing gimplification, or GS_ERROR.  */
3035 enum gimplify_status
3036 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3037 bool want_value, tree arith_type)
3039 enum tree_code code;
3040 tree lhs, lvalue, rhs, t1;
3041 gimple_seq post = NULL, *orig_post_p = post_p;
3042 bool postfix;
3043 enum tree_code arith_code;
3044 enum gimplify_status ret;
3045 location_t loc = EXPR_LOCATION (*expr_p);
3047 code = TREE_CODE (*expr_p);
3049 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3050 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3052 /* Prefix or postfix? */
3053 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3054 /* Faster to treat as prefix if result is not used. */
3055 postfix = want_value;
3056 else
3057 postfix = false;
3059 /* For postfix, make sure the inner expression's post side effects
3060 are executed after side effects from this expression. */
3061 if (postfix)
3062 post_p = &post;
3064 /* Add or subtract? */
3065 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3066 arith_code = PLUS_EXPR;
3067 else
3068 arith_code = MINUS_EXPR;
3070 /* Gimplify the LHS into a GIMPLE lvalue. */
3071 lvalue = TREE_OPERAND (*expr_p, 0);
3072 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3073 if (ret == GS_ERROR)
3074 return ret;
3076 /* Extract the operands to the arithmetic operation. */
3077 lhs = lvalue;
3078 rhs = TREE_OPERAND (*expr_p, 1);
3080 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3081 that as the result value and in the postqueue operation. */
3082 if (postfix)
3084 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3085 if (ret == GS_ERROR)
3086 return ret;
/* Snapshot the pre-modification value into a temporary; this temporary
   becomes the result of the postfix expression.  */
3088 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3091 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3092 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3094 rhs = convert_to_ptrofftype_loc (loc, rhs);
/* POINTER_PLUS_EXPR has no MINUS counterpart, so decrement is
   expressed as adding the negated offset.  */
3095 if (arith_code == MINUS_EXPR)
3096 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3097 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3099 else
3100 t1 = fold_convert (TREE_TYPE (*expr_p),
3101 fold_build2 (arith_code, arith_type,
3102 fold_convert (arith_type, lhs),
3103 fold_convert (arith_type, rhs)));
3105 if (postfix)
3107 gimplify_assign (lvalue, t1, pre_p);
3108 gimplify_seq_add_seq (orig_post_p, post);
3109 *expr_p = lhs;
3110 return GS_ALL_DONE;
3112 else
3114 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3115 return GS_OK;
3119 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3121 static void
3122 maybe_with_size_expr (tree *expr_p)
3124 tree expr = *expr_p;
3125 tree type = TREE_TYPE (expr);
3126 tree size;
3128 /* If we've already wrapped this or the type is error_mark_node, we can't do
3129 anything. */
3130 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3131 || type == error_mark_node)
3132 return;
3134 /* If the size isn't known or is a constant, we have nothing to do. */
3135 size = TYPE_SIZE_UNIT (type);
3136 if (!size || poly_int_tree_p (size))
3137 return;
3139 /* Otherwise, make a WITH_SIZE_EXPR. */
3140 size = unshare_expr (size);
3141 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3142 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3145 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3146 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3147 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3148 gimplified to an SSA name. */
3150 enum gimplify_status
3151 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3152 bool allow_ssa)
3154 bool (*test) (tree);
3155 fallback_t fb;
3157 /* In general, we allow lvalues for function arguments to avoid
3158 extra overhead of copying large aggregates out of even larger
3159 aggregates into temporaries only to copy the temporaries to
3160 the argument list. Make optimizers happy by pulling out to
3161 temporaries those types that fit in registers. */
3162 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3163 test = is_gimple_val, fb = fb_rvalue;
3164 else
3166 test = is_gimple_lvalue, fb = fb_either;
3167 /* Also strip a TARGET_EXPR that would force an extra copy. */
3168 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3170 tree init = TARGET_EXPR_INITIAL (*arg_p);
3171 if (init
3172 && !VOID_TYPE_P (TREE_TYPE (init)))
3173 *arg_p = init;
3177 /* If this is a variable sized type, we must remember the size. */
3178 maybe_with_size_expr (arg_p);
3180 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3181 /* Make sure arguments have the same location as the function call
3182 itself. */
3183 protected_set_expr_location (*arg_p, call_location);
3185 /* There is a sequence point before a function call. Side effects in
3186 the argument list must occur before the actual call. So, when
3187 gimplifying arguments, force gimplify_expr to use an internal
3188 post queue which is then appended to the end of PRE_P. */
3189 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3192 /* Don't fold inside offloading or taskreg regions: it can break code by
3193 adding decl references that weren't in the source. We'll do it during
3194 omplower pass instead. */
3196 static bool
3197 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3199 struct gimplify_omp_ctx *ctx;
3200 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3201 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3202 return false;
3203 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3204 return false;
3205 /* Delay folding of builtins until the IL is in consistent state
3206 so the diagnostic machinery can do a better job. */
3207 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3208 return false;
3209 return fold_stmt (gsi);
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  Internal calls have
     no CALL_EXPR_FN; they are identified by their internal_fn code.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      default:
	;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P in lockstep with the actual arguments; if P runs out
     first, extra (unnamed) arguments remain.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p), ! returns_twice);

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Simple predicate: emit "if (pred) goto yes; else goto no;".
	 build_and_jump creates the label lazily through the pointer.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a label of our own above, emit it at the end so the
     skipped sub-expression falls through to it.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3599 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3600 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3601 statement, if it is the last one. Otherwise, return NULL. */
3603 static tree
3604 find_goto (tree expr)
3606 if (!expr)
3607 return NULL_TREE;
3609 if (TREE_CODE (expr) == GOTO_EXPR)
3610 return expr;
3612 if (TREE_CODE (expr) != STATEMENT_LIST)
3613 return NULL_TREE;
3615 tree_stmt_iterator i = tsi_start (expr);
3617 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3618 tsi_next (&i);
3620 if (!tsi_one_before_end_p (i))
3621 return NULL_TREE;
3623 return find_goto (tsi_stmt (i));
3626 /* Same as find_goto, except that it returns NULL if the destination
3627 is not a LABEL_DECL. */
3629 static inline tree
3630 find_goto_label (tree expr)
3632 tree dest = find_goto (expr);
3633 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3634 return dest;
3635 return NULL_TREE;
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.
     Since the arms are side-effect free here, the non-short-circuit
     forms are equivalent.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  /* Both arms are gimplified unconditionally; the caller has already
     checked they have no side effects and cannot trap.  */
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
3930 /* Return true if evaluating EXPR could trap.
3931 EXPR is GENERIC, while tree_could_trap_p can be called
3932 only on GIMPLE. */
3934 bool
3935 generic_expr_could_trap_p (tree expr)
3937 unsigned i, n;
3939 if (!expr || is_gimple_val (expr))
3940 return false;
3942 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3943 return true;
3945 n = TREE_OPERAND_LENGTH (expr);
3946 for (i = 0; i < n; i++)
3947 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3948 return true;
3950 return false;
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else			or	else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
4192 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4193 to be marked addressable.
4195 We cannot rely on such an expression being directly markable if a temporary
4196 has been created by the gimplification. In this case, we create another
4197 temporary and initialize it with a copy, which will become a store after we
4198 mark it addressable. This can happen if the front-end passed us something
4199 that it could not mark addressable yet, like a Fortran pass-by-reference
4200 parameter (int) floatvar. */
4202 static void
4203 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4205 while (handled_component_p (*expr_p))
4206 expr_p = &TREE_OPERAND (*expr_p, 0);
4207 if (is_gimple_reg (*expr_p))
4209 /* Do not allow an SSA name as the temporary. */
4210 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4211 DECL_GIMPLE_REG_P (var) = 0;
4212 *expr_p = var;
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy(); the caller wants the destination as the value,
	 so hand back a dereference of memcpy's return value.  */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset(); the caller wants a value, so hand back a
	 dereference of memset's return value (the destination).  */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
4309 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4310 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4311 assignment. Return non-null if we detect a potential overlap. */
4313 struct gimplify_init_ctor_preeval_data
4315 /* The base decl of the lhs object. May be NULL, in which case we
4316 have to assume the lhs is indirect. */
4317 tree lhs_base_decl;
4319 /* The alias set of the lhs object. */
4320 alias_set_type lhs_alias_set;
/* walk_tree callback.  TP is the subtree being visited, WALK_SUBTREES
   controls recursion into it, XDATA is the preeval_data above.
   Returning non-NULL terminates the walk and signals overlap.  */
4323 static tree
4324 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4326 struct gimplify_init_ctor_preeval_data *data
4327 = (struct gimplify_init_ctor_preeval_data *) xdata;
4328 tree t = *tp;
4330 /* If we find the base object, obviously we have overlap. */
4331 if (data->lhs_base_decl == t)
4332 return t;
4334 /* If the constructor component is indirect, determine if we have a
4335 potential overlap with the lhs. The only bits of information we
4336 have to go on at this point are addressability and alias sets. */
4337 if ((INDIRECT_REF_P (t)
4338 || TREE_CODE (t) == MEM_REF)
4339 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4340 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4341 return t;
4343 /* If the constructor component is a call, determine if it can hide a
4344 potential overlap with the lhs through an INDIRECT_REF like above.
4345 ??? Ugh - this is completely broken. In fact this whole analysis
4346 doesn't look conservative. */
4347 if (TREE_CODE (t) == CALL_EXPR)
4349 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
/* Check every pointer-typed parameter for a possible aliasing
   conflict with the lhs.  */
4351 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4352 if (POINTER_TYPE_P (TREE_VALUE (type))
4353 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4354 && alias_sets_conflict_p (data->lhs_alias_set,
4355 get_alias_set
4356 (TREE_TYPE (TREE_VALUE (type)))))
4357 return t;
/* Types and decls carry no interesting subtrees for this analysis;
   prune the walk there.  */
4360 if (IS_TYPE_OR_DECL_P (t))
4361 *walk_subtrees = 0;
4362 return NULL;
4365 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4366 force values that overlap with the lhs (as described by *DATA)
4367 into temporaries. */
/* EXPR_P points at one constructor element value; generated statements
   go onto PRE_P/POST_P; DATA describes the lhs being initialized.  */
4369 static void
4370 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4371 struct gimplify_init_ctor_preeval_data *data)
4373 enum gimplify_status one;
4375 /* If the value is constant, then there's nothing to pre-evaluate. */
4376 if (TREE_CONSTANT (*expr_p))
4378 /* Ensure it does not have side effects, it might contain a reference to
4379 the object we're initializing. */
4380 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4381 return;
4384 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4385 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4386 return;
4388 /* Recurse for nested constructors. */
4389 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4391 unsigned HOST_WIDE_INT ix;
4392 constructor_elt *ce;
4393 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4395 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4396 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4398 return;
4401 /* If this is a variable sized type, we must remember the size. */
4402 maybe_with_size_expr (expr_p);
4404 /* Gimplify the constructor element to something appropriate for the rhs
4405 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4406 the gimplifier will consider this a store to memory. Doing this
4407 gimplification now means that we won't have to deal with complicated
4408 language-specific trees, nor trees like SAVE_EXPR that can induce
4409 exponential search behavior. */
4410 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4411 if (one == GS_ERROR)
/* Gimplification failed: NULL the element; the caller skips NULLs.  */
4413 *expr_p = NULL;
4414 return;
4417 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4418 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4419 always be true for all scalars, since is_gimple_mem_rhs insists on a
4420 temporary variable for them. */
4421 if (DECL_P (*expr_p))
4422 return;
4424 /* If this is of variable size, we have no choice but to assume it doesn't
4425 overlap since we can't make a temporary for it. */
4426 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4427 return;
4429 /* Otherwise, we must search for overlap ... */
4430 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4431 return;
4433 /* ... and if found, force the value into a temporary. */
4434 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4437 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4438 a RANGE_EXPR in a CONSTRUCTOR for an array.
4440 var = lower;
4441 loop_entry:
4442 object[var] = value;
4443 if (var == upper)
4444 goto loop_exit;
4445 var = var + 1;
4446 goto loop_entry;
4447 loop_exit:
4449 We increment var _after_ the loop exit check because we might otherwise
4450 fail if upper == TYPE_MAX_VALUE (type for upper).
4452 Note that we never have to deal with SAVE_EXPRs here, because this has
4453 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4455 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4456 gimple_seq *, bool);
/* OBJECT is the array being initialized, [LOWER, UPPER] the inclusive
   index range, VALUE the per-element initializer, ARRAY_ELT_TYPE the
   element type.  Statements are appended to PRE_P; CLEARED is forwarded
   to nested constructor evaluation.  */
4458 static void
4459 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4460 tree value, tree array_elt_type,
4461 gimple_seq *pre_p, bool cleared)
4463 tree loop_entry_label, loop_exit_label, fall_thru_label;
4464 tree var, var_type, cref, tmp;
4466 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4467 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4468 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4470 /* Create and initialize the index variable. */
4471 var_type = TREE_TYPE (upper);
4472 var = create_tmp_var (var_type);
4473 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4475 /* Add the loop entry label. */
4476 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4478 /* Build the reference. */
4479 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4480 var, NULL_TREE, NULL_TREE);
4482 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4483 the store. Otherwise just assign value to the reference. */
4485 if (TREE_CODE (value) == CONSTRUCTOR)
4486 /* NB we might have to call ourself recursively through
4487 gimplify_init_ctor_eval if the value is a constructor. */
4488 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4489 pre_p, cleared);
4490 else
4491 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4493 /* We exit the loop when the index var is equal to the upper bound. */
4494 gimplify_seq_add_stmt (pre_p,
4495 gimple_build_cond (EQ_EXPR, var, upper,
4496 loop_exit_label, fall_thru_label));
4498 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4500 /* Otherwise, increment the index var... */
4501 tmp = build2 (PLUS_EXPR, var_type, var,
4502 fold_convert (var_type, integer_one_node));
4503 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4505 /* ...and jump back to the loop entry. */
4506 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4508 /* Add the loop exit label. */
4509 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4512 /* Return true if FDECL is accessing a field that is zero sized. */
4514 static bool
4515 zero_sized_field_decl (const_tree fdecl)
4517 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4518 && integer_zerop (DECL_SIZE (fdecl)))
4519 return true;
4520 return false;
4523 /* Return true if TYPE is zero sized. */
4525 static bool
4526 zero_sized_type (const_tree type)
4528 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4529 && integer_zerop (TYPE_SIZE (type)))
4530 return true;
4531 return false;
4534 /* A subroutine of gimplify_init_constructor. Generate individual
4535 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4536 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4537 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4538 zeroed first. */
/* Generated statements are appended to PRE_P.  */
4540 static void
4541 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4542 gimple_seq *pre_p, bool cleared)
4544 tree array_elt_type = NULL;
4545 unsigned HOST_WIDE_INT ix;
4546 tree purpose, value;
4548 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4549 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4551 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4553 tree cref;
4555 /* NULL values are created above for gimplification errors. */
4556 if (value == NULL)
4557 continue;
/* If the object was block-cleared first, zero stores are redundant.  */
4559 if (cleared && initializer_zerop (value))
4560 continue;
4562 /* ??? Here's to hoping the front end fills in all of the indices,
4563 so we don't have to figure out what's missing ourselves. */
4564 gcc_assert (purpose);
4566 /* Skip zero-sized fields, unless value has side-effects. This can
4567 happen with calls to functions returning a zero-sized type, which
4568 we shouldn't discard. As a number of downstream passes don't
4569 expect sets of zero-sized fields, we rely on the gimplification of
4570 the MODIFY_EXPR we make below to drop the assignment statement. */
4571 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4572 continue;
4574 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4575 whole range. */
4576 if (TREE_CODE (purpose) == RANGE_EXPR)
4578 tree lower = TREE_OPERAND (purpose, 0);
4579 tree upper = TREE_OPERAND (purpose, 1);
4581 /* If the lower bound is equal to upper, just treat it as if
4582 upper was the index. */
4583 if (simple_cst_equal (lower, upper))
4584 purpose = upper;
4585 else
4587 gimplify_init_ctor_eval_range (object, lower, upper, value,
4588 array_elt_type, pre_p, cleared);
4589 continue;
/* Build the lhs reference for this element: an ARRAY_REF for arrays,
   a COMPONENT_REF for record/union fields.  */
4593 if (array_elt_type)
4595 /* Do not use bitsizetype for ARRAY_REF indices. */
4596 if (TYPE_DOMAIN (TREE_TYPE (object)))
4597 purpose
4598 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4599 purpose);
4600 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4601 purpose, NULL_TREE, NULL_TREE);
4603 else
4605 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4606 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4607 unshare_expr (object), purpose, NULL_TREE);
/* Nested aggregate constructors recurse; vector-typed values are
   handled as whole rvalues instead.  */
4610 if (TREE_CODE (value) == CONSTRUCTOR
4611 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4612 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4613 pre_p, cleared);
4614 else
4616 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4617 gimplify_and_add (init, pre_p);
4618 ggc_free (init);
4623 /* Return the appropriate RHS predicate for this LHS. */
4625 gimple_predicate
4626 rhs_predicate_for (tree lhs)
4628 if (is_gimple_reg (lhs))
4629 return is_gimple_reg_rhs_or_call;
4630 else
4631 return is_gimple_mem_rhs_or_call;
4634 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4635 before the LHS has been gimplified. */
4637 static gimple_predicate
4638 initial_rhs_predicate_for (tree lhs)
4640 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4641 return is_gimple_reg_rhs_or_call;
4642 else
4643 return is_gimple_mem_rhs_or_call;
4646 /* Gimplify a C99 compound literal expression. This just means adding
4647 the DECL_EXPR before the current statement and using its anonymous
4648 decl instead. */
/* EXPR_P points to the COMPOUND_LITERAL_EXPR; statements go onto PRE_P.
   GIMPLE_TEST_F is the predicate the caller wants the result to
   satisfy; FALLBACK tells whether an lvalue is required.  */
4650 static enum gimplify_status
4651 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4652 bool (*gimple_test_f) (tree),
4653 fallback_t fallback)
4655 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4656 tree decl = DECL_EXPR_DECL (decl_s);
4657 tree init = DECL_INITIAL (decl);
4658 /* Mark the decl as addressable if the compound literal
4659 expression is addressable now, otherwise it is marked too late
4660 after we gimplify the initialization expression. */
4661 if (TREE_ADDRESSABLE (*expr_p))
4662 TREE_ADDRESSABLE (decl) = 1;
4663 /* Otherwise, if we don't need an lvalue and have a literal directly
4664 substitute it. Check if it matches the gimple predicate, as
4665 otherwise we'd generate a new temporary, and we can as well just
4666 use the decl we already have. */
4667 else if (!TREE_ADDRESSABLE (decl)
4668 && init
4669 && (fallback & fb_lvalue) == 0
4670 && gimple_test_f (init))
4672 *expr_p = init;
4673 return GS_OK;
4676 /* Preliminarily mark non-addressed complex variables as eligible
4677 for promotion to gimple registers. We'll transform their uses
4678 as we find them. */
4679 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4680 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4681 && !TREE_THIS_VOLATILE (decl)
4682 && !needs_to_live_in_memory (decl))
4683 DECL_GIMPLE_REG_P (decl) = 1;
4685 /* If the decl is not addressable, then it is being used in some
4686 expression or on the right hand side of a statement, and it can
4687 be put into a readonly data section. */
4688 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4689 TREE_READONLY (decl) = 1;
4691 /* This decl isn't mentioned in the enclosing block, so add it to the
4692 list of temps. FIXME it seems a bit of a kludge to say that
4693 anonymous artificial vars aren't pushed, but everything else is. */
4694 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4695 gimple_add_tmp_var (decl);
/* Emit the DECL_EXPR (which gimplifies the initializer) and replace
   the compound literal by its decl.  */
4697 gimplify_and_add (decl_s, pre_p);
4698 *expr_p = decl;
4699 return GS_OK;
4702 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4703 return a new CONSTRUCTOR if something changed. */
/* Recursively replaces each non-addressable compound literal whose
   initializer is itself a CONSTRUCTOR by that initializer.  The input
   tree is never modified: a copy is made lazily on first change
   (copy-on-write), so the original CONSTRUCTOR is returned untouched
   when nothing was optimized.  */
4705 static tree
4706 optimize_compound_literals_in_ctor (tree orig_ctor)
4708 tree ctor = orig_ctor;
4709 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4710 unsigned int idx, num = vec_safe_length (elts);
4712 for (idx = 0; idx < num; idx++)
4714 tree value = (*elts)[idx].value;
4715 tree newval = value;
4716 if (TREE_CODE (value) == CONSTRUCTOR)
4717 newval = optimize_compound_literals_in_ctor (value);
4718 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4720 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4721 tree decl = DECL_EXPR_DECL (decl_s);
4722 tree init = DECL_INITIAL (decl);
/* Only safe when neither the literal nor its decl has its address
   taken and the initializer is a CONSTRUCTOR we can recurse into.  */
4724 if (!TREE_ADDRESSABLE (value)
4725 && !TREE_ADDRESSABLE (decl)
4726 && init
4727 && TREE_CODE (init) == CONSTRUCTOR)
4728 newval = optimize_compound_literals_in_ctor (init);
4730 if (newval == value)
4731 continue;
/* First change: clone the CONSTRUCTOR and its element vector so the
   original stays intact.  */
4733 if (ctor == orig_ctor)
4735 ctor = copy_node (orig_ctor);
4736 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4737 elts = CONSTRUCTOR_ELTS (ctor);
4739 (*elts)[idx].value = newval;
4741 return ctor;
4744 /* A subroutine of gimplify_modify_expr. Break out elements of a
4745 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4747 Note that we still need to clear any elements that don't have explicit
4748 initializers, so if not all elements are initialized we keep the
4749 original MODIFY_EXPR, we just remove all of the constructor elements.
4751 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4752 GS_ERROR if we would have to create a temporary when gimplifying
4753 this constructor. Otherwise, return GS_OK.
4755 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
/* EXPR_P points to the MODIFY/INIT_EXPR whose rhs is the CONSTRUCTOR;
   PRE_P/POST_P receive generated statements; WANT_VALUE asks for the
   initialized object to remain as the expression value.  */
4757 static enum gimplify_status
4758 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4759 bool want_value, bool notify_temp_creation)
4761 tree object, ctor, type;
4762 enum gimplify_status ret;
4763 vec<constructor_elt, va_gc> *elts;
4765 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4767 if (!notify_temp_creation)
4769 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4770 is_gimple_lvalue, fb_lvalue);
4771 if (ret == GS_ERROR)
4772 return ret;
4775 object = TREE_OPERAND (*expr_p, 0);
4776 ctor = TREE_OPERAND (*expr_p, 1)
4777 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4778 type = TREE_TYPE (ctor);
4779 elts = CONSTRUCTOR_ELTS (ctor);
4780 ret = GS_ALL_DONE;
/* Dispatch on the constructed type; aggregates get the heavy
   heuristics, complex and vector types have bespoke lowerings.  */
4782 switch (TREE_CODE (type))
4784 case RECORD_TYPE:
4785 case UNION_TYPE:
4786 case QUAL_UNION_TYPE:
4787 case ARRAY_TYPE:
4789 struct gimplify_init_ctor_preeval_data preeval_data;
4790 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4791 HOST_WIDE_INT num_unique_nonzero_elements;
4792 bool cleared, complete_p, valid_const_initializer;
4793 /* Use readonly data for initializers of this or smaller size
4794 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4795 ratio. */
4796 const HOST_WIDE_INT min_unique_size = 64;
4797 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4798 is smaller than this, use readonly data. */
4799 const int unique_nonzero_ratio = 8;
4801 /* Aggregate types must lower constructors to initialization of
4802 individual elements. The exception is that a CONSTRUCTOR node
4803 with no elements indicates zero-initialization of the whole. */
4804 if (vec_safe_is_empty (elts))
4806 if (notify_temp_creation)
4807 return GS_OK;
4808 break;
4811 /* Fetch information about the constructor to direct later processing.
4812 We might want to make static versions of it in various cases, and
4813 can only do so if it known to be a valid constant initializer. */
4814 valid_const_initializer
4815 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4816 &num_unique_nonzero_elements,
4817 &num_ctor_elements, &complete_p);
4819 /* If a const aggregate variable is being initialized, then it
4820 should never be a lose to promote the variable to be static. */
4821 if (valid_const_initializer
4822 && num_nonzero_elements > 1
4823 && TREE_READONLY (object)
4824 && VAR_P (object)
4825 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4826 /* For ctors that have many repeated nonzero elements
4827 represented through RANGE_EXPRs, prefer initializing
4828 those through runtime loops over copies of large amounts
4829 of data from readonly data section. */
4830 && (num_unique_nonzero_elements
4831 > num_nonzero_elements / unique_nonzero_ratio
4832 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4833 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4835 if (notify_temp_creation)
4836 return GS_ERROR;
4837 DECL_INITIAL (object) = ctor;
4838 TREE_STATIC (object) = 1;
4839 if (!DECL_NAME (object))
4840 DECL_NAME (object) = create_tmp_var_name ("C");
4841 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4843 /* ??? C++ doesn't automatically append a .<number> to the
4844 assembler name, and even when it does, it looks at FE private
4845 data structures to figure out what that number should be,
4846 which are not set for this variable. I suppose this is
4847 important for local statics for inline functions, which aren't
4848 "local" in the object file sense. So in order to get a unique
4849 TU-local symbol, we must invoke the lhd version now. */
4850 lhd_set_decl_assembler_name (object);
4852 *expr_p = NULL_TREE;
4853 break;
4856 /* If there are "lots" of initialized elements, even discounting
4857 those that are not address constants (and thus *must* be
4858 computed at runtime), then partition the constructor into
4859 constant and non-constant parts. Block copy the constant
4860 parts in, then generate code for the non-constant parts. */
4861 /* TODO. There's code in cp/typeck.c to do this. */
4863 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4864 /* store_constructor will ignore the clearing of variable-sized
4865 objects. Initializers for such objects must explicitly set
4866 every field that needs to be set. */
4867 cleared = false;
4868 else if (!complete_p)
4869 /* If the constructor isn't complete, clear the whole object
4870 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4872 ??? This ought not to be needed. For any element not present
4873 in the initializer, we should simply set them to zero. Except
4874 we'd need to *find* the elements that are not present, and that
4875 requires trickery to avoid quadratic compile-time behavior in
4876 large cases or excessive memory use in small cases. */
4877 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4878 else if (num_ctor_elements - num_nonzero_elements
4879 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4880 && num_nonzero_elements < num_ctor_elements / 4)
4881 /* If there are "lots" of zeros, it's more efficient to clear
4882 the memory and then set the nonzero elements. */
4883 cleared = true;
4884 else
4885 cleared = false;
4887 /* If there are "lots" of initialized elements, and all of them
4888 are valid address constants, then the entire initializer can
4889 be dropped to memory, and then memcpy'd out. Don't do this
4890 for sparse arrays, though, as it's more efficient to follow
4891 the standard CONSTRUCTOR behavior of memset followed by
4892 individual element initialization. Also don't do this for small
4893 all-zero initializers (which aren't big enough to merit
4894 clearing), and don't try to make bitwise copies of
4895 TREE_ADDRESSABLE types. */
4897 if (valid_const_initializer
4898 && !(cleared || num_nonzero_elements == 0)
4899 && !TREE_ADDRESSABLE (type))
4901 HOST_WIDE_INT size = int_size_in_bytes (type);
4902 unsigned int align;
4904 /* ??? We can still get unbounded array types, at least
4905 from the C++ front end. This seems wrong, but attempt
4906 to work around it for now. */
4907 if (size < 0)
4909 size = int_size_in_bytes (TREE_TYPE (object));
4910 if (size >= 0)
4911 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4914 /* Find the maximum alignment we can assume for the object. */
4915 /* ??? Make use of DECL_OFFSET_ALIGN. */
4916 if (DECL_P (object))
4917 align = DECL_ALIGN (object);
4918 else
4919 align = TYPE_ALIGN (type);
4921 /* Do a block move either if the size is so small as to make
4922 each individual move a sub-unit move on average, or if it
4923 is so large as to make individual moves inefficient. */
4924 if (size > 0
4925 && num_nonzero_elements > 1
4926 /* For ctors that have many repeated nonzero elements
4927 represented through RANGE_EXPRs, prefer initializing
4928 those through runtime loops over copies of large amounts
4929 of data from readonly data section. */
4930 && (num_unique_nonzero_elements
4931 > num_nonzero_elements / unique_nonzero_ratio
4932 || size <= min_unique_size)
4933 && (size < num_nonzero_elements
4934 || !can_move_by_pieces (size, align)))
4936 if (notify_temp_creation)
4937 return GS_ERROR;
4939 walk_tree (&ctor, force_labels_r, NULL, NULL);
4940 ctor = tree_output_constant_def (ctor);
4941 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4942 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4943 TREE_OPERAND (*expr_p, 1) = ctor;
4945 /* This is no longer an assignment of a CONSTRUCTOR, but
4946 we still may have processing to do on the LHS. So
4947 pretend we didn't do anything here to let that happen. */
4948 return GS_UNHANDLED;
4952 /* If the target is volatile, we have non-zero elements and more than
4953 one field to assign, initialize the target from a temporary. */
4954 if (TREE_THIS_VOLATILE (object)
4955 && !TREE_ADDRESSABLE (type)
4956 && num_nonzero_elements > 0
4957 && vec_safe_length (elts) > 1)
4959 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4960 TREE_OPERAND (*expr_p, 0) = temp;
4961 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4962 *expr_p,
4963 build2 (MODIFY_EXPR, void_type_node,
4964 object, temp));
4965 return GS_OK;
4968 if (notify_temp_creation)
4969 return GS_OK;
4971 /* If there are nonzero elements and if needed, pre-evaluate to capture
4972 elements overlapping with the lhs into temporaries. We must do this
4973 before clearing to fetch the values before they are zeroed-out. */
4974 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4976 preeval_data.lhs_base_decl = get_base_address (object);
4977 if (!DECL_P (preeval_data.lhs_base_decl))
4978 preeval_data.lhs_base_decl = NULL;
4979 preeval_data.lhs_alias_set = get_alias_set (object);
4981 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4982 pre_p, post_p, &preeval_data);
4985 bool ctor_has_side_effects_p
4986 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4988 if (cleared)
4990 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4991 Note that we still have to gimplify, in order to handle the
4992 case of variable sized types. Avoid shared tree structures. */
4993 CONSTRUCTOR_ELTS (ctor) = NULL;
4994 TREE_SIDE_EFFECTS (ctor) = 0;
4995 object = unshare_expr (object);
4996 gimplify_stmt (expr_p, pre_p);
4999 /* If we have not block cleared the object, or if there are nonzero
5000 elements in the constructor, or if the constructor has side effects,
5001 add assignments to the individual scalar fields of the object. */
5002 if (!cleared
5003 || num_nonzero_elements > 0
5004 || ctor_has_side_effects_p)
5005 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5007 *expr_p = NULL_TREE;
5009 break;
5011 case COMPLEX_TYPE:
5013 tree r, i;
5015 if (notify_temp_creation)
5016 return GS_OK;
5018 /* Extract the real and imaginary parts out of the ctor. */
5019 gcc_assert (elts->length () == 2);
5020 r = (*elts)[0].value;
5021 i = (*elts)[1].value;
/* Missing parts default to zero of the component type.  */
5022 if (r == NULL || i == NULL)
5024 tree zero = build_zero_cst (TREE_TYPE (type));
5025 if (r == NULL)
5026 r = zero;
5027 if (i == NULL)
5028 i = zero;
5031 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5032 represent creation of a complex value. */
5033 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5035 ctor = build_complex (type, r, i);
5036 TREE_OPERAND (*expr_p, 1) = ctor;
5038 else
5040 ctor = build2 (COMPLEX_EXPR, type, r, i);
5041 TREE_OPERAND (*expr_p, 1) = ctor;
5042 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5043 pre_p,
5044 post_p,
5045 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5046 fb_rvalue);
5049 break;
5051 case VECTOR_TYPE:
5053 unsigned HOST_WIDE_INT ix;
5054 constructor_elt *ce;
5056 if (notify_temp_creation)
5057 return GS_OK;
5059 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5060 if (TREE_CONSTANT (ctor))
5062 bool constant_p = true;
5063 tree value;
5065 /* Even when ctor is constant, it might contain non-*_CST
5066 elements, such as addresses or trapping values like
5067 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5068 in VECTOR_CST nodes. */
5069 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5070 if (!CONSTANT_CLASS_P (value))
5072 constant_p = false;
5073 break;
5076 if (constant_p)
5078 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5079 break;
5082 TREE_CONSTANT (ctor) = 0;
5085 /* Vector types use CONSTRUCTOR all the way through gimple
5086 compilation as a general initializer. */
5087 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5089 enum gimplify_status tret;
5090 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5091 fb_rvalue);
5092 if (tret == GS_ERROR)
5093 ret = GS_ERROR;
5094 else if (TREE_STATIC (ctor)
5095 && !initializer_constant_valid_p (ce->value,
5096 TREE_TYPE (ce->value)))
5097 TREE_STATIC (ctor) = 0;
/* A memory lhs needs the whole ctor forced into a temporary.  */
5099 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5100 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5102 break;
5104 default:
5105 /* So how did we get a CONSTRUCTOR for a scalar type? */
5106 gcc_unreachable ();
5109 if (ret == GS_ERROR)
5110 return GS_ERROR;
5111 /* If we have gimplified both sides of the initializer but have
5112 not emitted an assignment, do so now. */
5113 if (*expr_p)
5115 tree lhs = TREE_OPERAND (*expr_p, 0);
5116 tree rhs = TREE_OPERAND (*expr_p, 1);
5117 if (want_value && object == lhs)
5118 lhs = unshare_expr (lhs);
5119 gassign *init = gimple_build_assign (lhs, rhs);
5120 gimplify_seq_add_stmt (pre_p, init);
5122 if (want_value)
5124 *expr_p = object;
5125 return GS_OK;
5127 else
5129 *expr_p = NULL;
5130 return GS_ALL_DONE;
5134 /* Given a pointer value OP0, return a simplified version of an
5135 indirection through OP0, or NULL_TREE if no simplification is
5136 possible. This may only be applied to a rhs of an expression.
5137 Note that the resulting type may be different from the type pointed
5138 to in the sense that it is still compatible from the langhooks
5139 point of view. */
5141 static tree
5142 gimple_fold_indirect_ref_rhs (tree t)
5144 return gimple_fold_indirect_ref (t);
5147 /* Subroutine of gimplify_modify_expr to do simplifications of
5148 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5149 something changes. */
5151 static enum gimplify_status
5152 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5153 gimple_seq *pre_p, gimple_seq *post_p,
5154 bool want_value)
5156 enum gimplify_status ret = GS_UNHANDLED;
5157 bool changed;
5161 changed = false;
5162 switch (TREE_CODE (*from_p))
5164 case VAR_DECL:
5165 /* If we're assigning from a read-only variable initialized with
5166 a constructor, do the direct assignment from the constructor,
5167 but only if neither source nor target are volatile since this
5168 latter assignment might end up being done on a per-field basis. */
5169 if (DECL_INITIAL (*from_p)
5170 && TREE_READONLY (*from_p)
5171 && !TREE_THIS_VOLATILE (*from_p)
5172 && !TREE_THIS_VOLATILE (*to_p)
5173 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5175 tree old_from = *from_p;
5176 enum gimplify_status subret;
5178 /* Move the constructor into the RHS. */
5179 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5181 /* Let's see if gimplify_init_constructor will need to put
5182 it in memory. */
5183 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5184 false, true);
5185 if (subret == GS_ERROR)
5187 /* If so, revert the change. */
5188 *from_p = old_from;
5190 else
5192 ret = GS_OK;
5193 changed = true;
5196 break;
5197 case INDIRECT_REF:
5199 /* If we have code like
5201 *(const A*)(A*)&x
5203 where the type of "x" is a (possibly cv-qualified variant
5204 of "A"), treat the entire expression as identical to "x".
5205 This kind of code arises in C++ when an object is bound
5206 to a const reference, and if "x" is a TARGET_EXPR we want
5207 to take advantage of the optimization below. */
5208 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5209 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5210 if (t)
5212 if (TREE_THIS_VOLATILE (t) != volatile_p)
5214 if (DECL_P (t))
5215 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5216 build_fold_addr_expr (t));
5217 if (REFERENCE_CLASS_P (t))
5218 TREE_THIS_VOLATILE (t) = volatile_p;
5220 *from_p = t;
5221 ret = GS_OK;
5222 changed = true;
5224 break;
5227 case TARGET_EXPR:
5229 /* If we are initializing something from a TARGET_EXPR, strip the
5230 TARGET_EXPR and initialize it directly, if possible. This can't
5231 be done if the initializer is void, since that implies that the
5232 temporary is set in some non-trivial way.
5234 ??? What about code that pulls out the temp and uses it
5235 elsewhere? I think that such code never uses the TARGET_EXPR as
5236 an initializer. If I'm wrong, we'll die because the temp won't
5237 have any RTL. In that case, I guess we'll need to replace
5238 references somehow. */
5239 tree init = TARGET_EXPR_INITIAL (*from_p);
5241 if (init
5242 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5243 || !TARGET_EXPR_NO_ELIDE (*from_p))
5244 && !VOID_TYPE_P (TREE_TYPE (init)))
5246 *from_p = init;
5247 ret = GS_OK;
5248 changed = true;
5251 break;
5253 case COMPOUND_EXPR:
5254 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5255 caught. */
5256 gimplify_compound_expr (from_p, pre_p, true);
5257 ret = GS_OK;
5258 changed = true;
5259 break;
5261 case CONSTRUCTOR:
5262 /* If we already made some changes, let the front end have a
5263 crack at this before we break it down. */
5264 if (ret != GS_UNHANDLED)
5265 break;
5266 /* If we're initializing from a CONSTRUCTOR, break this into
5267 individual MODIFY_EXPRs. */
5268 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5269 false);
5271 case COND_EXPR:
5272 /* If we're assigning to a non-register type, push the assignment
5273 down into the branches. This is mandatory for ADDRESSABLE types,
5274 since we cannot generate temporaries for such, but it saves a
5275 copy in other cases as well. */
5276 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5278 /* This code should mirror the code in gimplify_cond_expr. */
5279 enum tree_code code = TREE_CODE (*expr_p);
5280 tree cond = *from_p;
5281 tree result = *to_p;
5283 ret = gimplify_expr (&result, pre_p, post_p,
5284 is_gimple_lvalue, fb_lvalue);
5285 if (ret != GS_ERROR)
5286 ret = GS_OK;
5288 /* If we are going to write RESULT more than once, clear
5289 TREE_READONLY flag, otherwise we might incorrectly promote
5290 the variable to static const and initialize it at compile
5291 time in one of the branches. */
5292 if (VAR_P (result)
5293 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5294 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5295 TREE_READONLY (result) = 0;
5296 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5297 TREE_OPERAND (cond, 1)
5298 = build2 (code, void_type_node, result,
5299 TREE_OPERAND (cond, 1));
5300 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5301 TREE_OPERAND (cond, 2)
5302 = build2 (code, void_type_node, unshare_expr (result),
5303 TREE_OPERAND (cond, 2));
5305 TREE_TYPE (cond) = void_type_node;
5306 recalculate_side_effects (cond);
5308 if (want_value)
5310 gimplify_and_add (cond, pre_p);
5311 *expr_p = unshare_expr (result);
5313 else
5314 *expr_p = cond;
5315 return ret;
5317 break;
5319 case CALL_EXPR:
5320 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5321 return slot so that we don't generate a temporary. */
5322 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5323 && aggregate_value_p (*from_p, *from_p))
5325 bool use_target;
5327 if (!(rhs_predicate_for (*to_p))(*from_p))
5328 /* If we need a temporary, *to_p isn't accurate. */
5329 use_target = false;
5330 /* It's OK to use the return slot directly unless it's an NRV. */
5331 else if (TREE_CODE (*to_p) == RESULT_DECL
5332 && DECL_NAME (*to_p) == NULL_TREE
5333 && needs_to_live_in_memory (*to_p))
5334 use_target = true;
5335 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5336 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5337 /* Don't force regs into memory. */
5338 use_target = false;
5339 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5340 /* It's OK to use the target directly if it's being
5341 initialized. */
5342 use_target = true;
5343 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5344 != INTEGER_CST)
5345 /* Always use the target and thus RSO for variable-sized types.
5346 GIMPLE cannot deal with a variable-sized assignment
5347 embedded in a call statement. */
5348 use_target = true;
5349 else if (TREE_CODE (*to_p) != SSA_NAME
5350 && (!is_gimple_variable (*to_p)
5351 || needs_to_live_in_memory (*to_p)))
5352 /* Don't use the original target if it's already addressable;
5353 if its address escapes, and the called function uses the
5354 NRV optimization, a conforming program could see *to_p
5355 change before the called function returns; see c++/19317.
5356 When optimizing, the return_slot pass marks more functions
5357 as safe after we have escape info. */
5358 use_target = false;
5359 else
5360 use_target = true;
5362 if (use_target)
5364 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5365 mark_addressable (*to_p);
5368 break;
5370 case WITH_SIZE_EXPR:
5371 /* Likewise for calls that return an aggregate of non-constant size,
5372 since we would not be able to generate a temporary at all. */
5373 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5375 *from_p = TREE_OPERAND (*from_p, 0);
5376 /* We don't change ret in this case because the
5377 WITH_SIZE_EXPR might have been added in
5378 gimplify_modify_expr, so returning GS_OK would lead to an
5379 infinite loop. */
5380 changed = true;
5382 break;
5384 /* If we're initializing from a container, push the initialization
5385 inside it. */
5386 case CLEANUP_POINT_EXPR:
5387 case BIND_EXPR:
5388 case STATEMENT_LIST:
5390 tree wrap = *from_p;
5391 tree t;
5393 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5394 fb_lvalue);
5395 if (ret != GS_ERROR)
5396 ret = GS_OK;
5398 t = voidify_wrapper_expr (wrap, *expr_p);
5399 gcc_assert (t == *expr_p);
5401 if (want_value)
5403 gimplify_and_add (wrap, pre_p);
5404 *expr_p = unshare_expr (*to_p);
5406 else
5407 *expr_p = wrap;
5408 return GS_OK;
5411 case COMPOUND_LITERAL_EXPR:
5413 tree complit = TREE_OPERAND (*expr_p, 1);
5414 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5415 tree decl = DECL_EXPR_DECL (decl_s);
5416 tree init = DECL_INITIAL (decl);
5418 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5419 into struct T x = { 0, 1, 2 } if the address of the
5420 compound literal has never been taken. */
5421 if (!TREE_ADDRESSABLE (complit)
5422 && !TREE_ADDRESSABLE (decl)
5423 && init)
5425 *expr_p = copy_node (*expr_p);
5426 TREE_OPERAND (*expr_p, 1) = init;
5427 return GS_OK;
5431 default:
5432 break;
5435 while (changed);
5437 return ret;
5441 /* Return true if T looks like a valid GIMPLE statement. */
5443 static bool
5444 is_gimple_stmt (tree t)
5446 const enum tree_code code = TREE_CODE (t);
5448 switch (code)
5450 case NOP_EXPR:
5451 /* The only valid NOP_EXPR is the empty statement. */
5452 return IS_EMPTY_STMT (t);
5454 case BIND_EXPR:
5455 case COND_EXPR:
5456 /* These are only valid if they're void. */
5457 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5459 case SWITCH_EXPR:
5460 case GOTO_EXPR:
5461 case RETURN_EXPR:
5462 case LABEL_EXPR:
5463 case CASE_LABEL_EXPR:
5464 case TRY_CATCH_EXPR:
5465 case TRY_FINALLY_EXPR:
5466 case EH_FILTER_EXPR:
5467 case CATCH_EXPR:
5468 case ASM_EXPR:
5469 case STATEMENT_LIST:
5470 case OACC_PARALLEL:
5471 case OACC_KERNELS:
5472 case OACC_DATA:
5473 case OACC_HOST_DATA:
5474 case OACC_DECLARE:
5475 case OACC_UPDATE:
5476 case OACC_ENTER_DATA:
5477 case OACC_EXIT_DATA:
5478 case OACC_CACHE:
5479 case OMP_PARALLEL:
5480 case OMP_FOR:
5481 case OMP_SIMD:
5482 case OMP_DISTRIBUTE:
5483 case OACC_LOOP:
5484 case OMP_SECTIONS:
5485 case OMP_SECTION:
5486 case OMP_SINGLE:
5487 case OMP_MASTER:
5488 case OMP_TASKGROUP:
5489 case OMP_ORDERED:
5490 case OMP_CRITICAL:
5491 case OMP_TASK:
5492 case OMP_TARGET:
5493 case OMP_TARGET_DATA:
5494 case OMP_TARGET_UPDATE:
5495 case OMP_TARGET_ENTER_DATA:
5496 case OMP_TARGET_EXIT_DATA:
5497 case OMP_TASKLOOP:
5498 case OMP_TEAMS:
5499 /* These are always void. */
5500 return true;
5502 case CALL_EXPR:
5503 case MODIFY_EXPR:
5504 case PREDICT_EXPR:
5505 /* These are valid regardless of their type. */
5506 return true;
5508 default:
5509 return false;
5514 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5515 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5516 DECL_GIMPLE_REG_P set.
5518 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5519 other, unmodified part of the complex object just before the total store.
5520 As a consequence, if the object is still uninitialized, an undefined value
5521 will be loaded into a register, which may result in a spurious exception
5522 if the register is floating-point and the value happens to be a signaling
5523 NaN for example. Then the fully-fledged complex operations lowering pass
5524 followed by a DCE pass are necessary in order to fix things up. */
5526 static enum gimplify_status
5527 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5528 bool want_value)
5530 enum tree_code code, ocode;
5531 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5533 lhs = TREE_OPERAND (*expr_p, 0);
5534 rhs = TREE_OPERAND (*expr_p, 1);
5535 code = TREE_CODE (lhs);
5536 lhs = TREE_OPERAND (lhs, 0);
5538 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5539 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5540 TREE_NO_WARNING (other) = 1;
5541 other = get_formal_tmp_var (other, pre_p);
5543 realpart = code == REALPART_EXPR ? rhs : other;
5544 imagpart = code == REALPART_EXPR ? other : rhs;
5546 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5547 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5548 else
5549 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5551 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5552 *expr_p = (want_value) ? rhs : NULL_TREE;
5554 return GS_ALL_DONE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      /* If the LHS is neither a variable nor a MEM_REF, take its address
	 into a temporary and clobber through that, so the emitted
	 assignment has a form the clobber can use.  */
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  /* Rebuild the IFN_VA_ARG call with the size appended as an
	     extra trailing argument.  */
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* If a value is wanted and the LHS is volatile, evaluate the RHS into a
     temporary first: below, the value of the expression is taken from
     *FROM_P rather than re-read from the volatile *TO_P.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with its three-argument form is lowered to
	     the internal function so later passes can consume it.  */
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5850 /* Gimplify a comparison between two variable-sized objects. Do this
5851 with a call to BUILT_IN_MEMCMP. */
5853 static enum gimplify_status
5854 gimplify_variable_sized_compare (tree *expr_p)
5856 location_t loc = EXPR_LOCATION (*expr_p);
5857 tree op0 = TREE_OPERAND (*expr_p, 0);
5858 tree op1 = TREE_OPERAND (*expr_p, 1);
5859 tree t, arg, dest, src, expr;
5861 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5862 arg = unshare_expr (arg);
5863 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5864 src = build_fold_addr_expr_loc (loc, op1);
5865 dest = build_fold_addr_expr_loc (loc, op0);
5866 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5867 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5869 expr
5870 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5871 SET_EXPR_LOCATION (expr, loc);
5872 *expr_p = expr;
5874 return GS_OK;
5877 /* Gimplify a comparison between two aggregate objects of integral scalar
5878 mode as a comparison between the bitwise equivalent scalar values. */
5880 static enum gimplify_status
5881 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5883 location_t loc = EXPR_LOCATION (*expr_p);
5884 tree op0 = TREE_OPERAND (*expr_p, 0);
5885 tree op1 = TREE_OPERAND (*expr_p, 1);
5887 tree type = TREE_TYPE (op0);
5888 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5890 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5891 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5893 *expr_p
5894 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5896 return GS_OK;
5899 /* Gimplify an expression sequence. This function gimplifies each
5900 expression and rewrites the original expression with the last
5901 expression of the sequence in GIMPLE form.
5903 PRE_P points to the list where the side effects for all the
5904 expressions in the sequence will be emitted.
5906 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5908 static enum gimplify_status
5909 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5911 tree t = *expr_p;
5915 tree *sub_p = &TREE_OPERAND (t, 0);
5917 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5918 gimplify_compound_expr (sub_p, pre_p, false);
5919 else
5920 gimplify_stmt (sub_p, pre_p);
5922 t = TREE_OPERAND (t, 1);
5924 while (TREE_CODE (t) == COMPOUND_EXPR);
5926 *expr_p = t;
5927 if (want_value)
5928 return GS_OK;
5929 else
5931 gimplify_stmt (expr_p, pre_p);
5932 return GS_ALL_DONE;
5936 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5937 gimplify. After gimplification, EXPR_P will point to a new temporary
5938 that holds the original value of the SAVE_EXPR node.
5940 PRE_P points to the list where side effects that must happen before
5941 *EXPR_P should be stored. */
5943 static enum gimplify_status
5944 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5946 enum gimplify_status ret = GS_ALL_DONE;
5947 tree val;
5949 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5950 val = TREE_OPERAND (*expr_p, 0);
5952 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5953 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5955 /* The operand may be a void-valued expression. It is
5956 being executed only for its side-effects. */
5957 if (TREE_TYPE (val) == void_type_node)
5959 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5960 is_gimple_stmt, fb_none);
5961 val = NULL;
5963 else
5964 /* The temporary may not be an SSA name as later abnormal and EH
5965 control flow may invalidate use/def domination. When in SSA
5966 form then assume there are no such issues and SAVE_EXPRs only
5967 appear via GENERIC foldings. */
5968 val = get_initialized_tmp_var (val, pre_p, post_p,
5969 gimple_in_ssa_p (cfun));
5971 TREE_OPERAND (*expr_p, 0) = val;
5972 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5975 *expr_p = val;
5977 return ret;
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

   unary_expr
	: ...
	| '&' varname
	...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* &MEM[ptr, 0] is equivalent to &*ptr: reuse the INDIRECT_REF
	 handling when the offset is zero.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
6104 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6105 value; output operands should be a gimple lvalue. */
6107 static enum gimplify_status
6108 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6110 tree expr;
6111 int noutputs;
6112 const char **oconstraints;
6113 int i;
6114 tree link;
6115 const char *constraint;
6116 bool allows_mem, allows_reg, is_inout;
6117 enum gimplify_status ret, tret;
6118 gasm *stmt;
6119 vec<tree, va_gc> *inputs;
6120 vec<tree, va_gc> *outputs;
6121 vec<tree, va_gc> *clobbers;
6122 vec<tree, va_gc> *labels;
6123 tree link_next;
6125 expr = *expr_p;
6126 noutputs = list_length (ASM_OUTPUTS (expr));
6127 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6129 inputs = NULL;
6130 outputs = NULL;
6131 clobbers = NULL;
6132 labels = NULL;
6134 ret = GS_ALL_DONE;
6135 link_next = NULL_TREE;
6136 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6138 bool ok;
6139 size_t constraint_len;
6141 link_next = TREE_CHAIN (link);
6143 oconstraints[i]
6144 = constraint
6145 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6146 constraint_len = strlen (constraint);
6147 if (constraint_len == 0)
6148 continue;
6150 ok = parse_output_constraint (&constraint, i, 0, 0,
6151 &allows_mem, &allows_reg, &is_inout);
6152 if (!ok)
6154 ret = GS_ERROR;
6155 is_inout = false;
6158 if (!allows_reg && allows_mem)
6159 mark_addressable (TREE_VALUE (link));
6161 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6162 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6163 fb_lvalue | fb_mayfail);
6164 if (tret == GS_ERROR)
6166 error ("invalid lvalue in asm output %d", i);
6167 ret = tret;
6170 /* If the constraint does not allow memory make sure we gimplify
6171 it to a register if it is not already but its base is. This
6172 happens for complex and vector components. */
6173 if (!allows_mem)
6175 tree op = TREE_VALUE (link);
6176 if (! is_gimple_val (op)
6177 && is_gimple_reg_type (TREE_TYPE (op))
6178 && is_gimple_reg (get_base_address (op)))
6180 tree tem = create_tmp_reg (TREE_TYPE (op));
6181 tree ass;
6182 if (is_inout)
6184 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6185 tem, unshare_expr (op));
6186 gimplify_and_add (ass, pre_p);
6188 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6189 gimplify_and_add (ass, post_p);
6191 TREE_VALUE (link) = tem;
6192 tret = GS_OK;
6196 vec_safe_push (outputs, link);
6197 TREE_CHAIN (link) = NULL_TREE;
6199 if (is_inout)
6201 /* An input/output operand. To give the optimizers more
6202 flexibility, split it into separate input and output
6203 operands. */
6204 tree input;
6205 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6206 char buf[11];
6208 /* Turn the in/out constraint into an output constraint. */
6209 char *p = xstrdup (constraint);
6210 p[0] = '=';
6211 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6213 /* And add a matching input constraint. */
6214 if (allows_reg)
6216 sprintf (buf, "%u", i);
6218 /* If there are multiple alternatives in the constraint,
6219 handle each of them individually. Those that allow register
6220 will be replaced with operand number, the others will stay
6221 unchanged. */
6222 if (strchr (p, ',') != NULL)
6224 size_t len = 0, buflen = strlen (buf);
6225 char *beg, *end, *str, *dst;
6227 for (beg = p + 1;;)
6229 end = strchr (beg, ',');
6230 if (end == NULL)
6231 end = strchr (beg, '\0');
6232 if ((size_t) (end - beg) < buflen)
6233 len += buflen + 1;
6234 else
6235 len += end - beg + 1;
6236 if (*end)
6237 beg = end + 1;
6238 else
6239 break;
6242 str = (char *) alloca (len);
6243 for (beg = p + 1, dst = str;;)
6245 const char *tem;
6246 bool mem_p, reg_p, inout_p;
6248 end = strchr (beg, ',');
6249 if (end)
6250 *end = '\0';
6251 beg[-1] = '=';
6252 tem = beg - 1;
6253 parse_output_constraint (&tem, i, 0, 0,
6254 &mem_p, &reg_p, &inout_p);
6255 if (dst != str)
6256 *dst++ = ',';
6257 if (reg_p)
6259 memcpy (dst, buf, buflen);
6260 dst += buflen;
6262 else
6264 if (end)
6265 len = end - beg;
6266 else
6267 len = strlen (beg);
6268 memcpy (dst, beg, len);
6269 dst += len;
6271 if (end)
6272 beg = end + 1;
6273 else
6274 break;
6276 *dst = '\0';
6277 input = build_string (dst - str, str);
6279 else
6280 input = build_string (strlen (buf), buf);
6282 else
6283 input = build_string (constraint_len - 1, constraint + 1);
6285 free (p);
6287 input = build_tree_list (build_tree_list (NULL_TREE, input),
6288 unshare_expr (TREE_VALUE (link)));
6289 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6293 link_next = NULL_TREE;
6294 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6296 link_next = TREE_CHAIN (link);
6297 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6298 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6299 oconstraints, &allows_mem, &allows_reg);
6301 /* If we can't make copies, we can only accept memory. */
6302 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6304 if (allows_mem)
6305 allows_reg = 0;
6306 else
6308 error ("impossible constraint in %<asm%>");
6309 error ("non-memory input %d must stay in memory", i);
6310 return GS_ERROR;
6314 /* If the operand is a memory input, it should be an lvalue. */
6315 if (!allows_reg && allows_mem)
6317 tree inputv = TREE_VALUE (link);
6318 STRIP_NOPS (inputv);
6319 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6320 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6321 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6322 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6323 || TREE_CODE (inputv) == MODIFY_EXPR)
6324 TREE_VALUE (link) = error_mark_node;
6325 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6326 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6327 if (tret != GS_ERROR)
6329 /* Unlike output operands, memory inputs are not guaranteed
6330 to be lvalues by the FE, and while the expressions are
6331 marked addressable there, if it is e.g. a statement
6332 expression, temporaries in it might not end up being
6333 addressable. They might be already used in the IL and thus
6334 it is too late to make them addressable now though. */
6335 tree x = TREE_VALUE (link);
6336 while (handled_component_p (x))
6337 x = TREE_OPERAND (x, 0);
6338 if (TREE_CODE (x) == MEM_REF
6339 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6340 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6341 if ((VAR_P (x)
6342 || TREE_CODE (x) == PARM_DECL
6343 || TREE_CODE (x) == RESULT_DECL)
6344 && !TREE_ADDRESSABLE (x)
6345 && is_gimple_reg (x))
6347 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6348 input_location), 0,
6349 "memory input %d is not directly addressable",
6351 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6354 mark_addressable (TREE_VALUE (link));
6355 if (tret == GS_ERROR)
6357 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6358 "memory input %d is not directly addressable", i);
6359 ret = tret;
6362 else
6364 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6365 is_gimple_asm_val, fb_rvalue);
6366 if (tret == GS_ERROR)
6367 ret = tret;
6370 TREE_CHAIN (link) = NULL_TREE;
6371 vec_safe_push (inputs, link);
6374 link_next = NULL_TREE;
6375 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6377 link_next = TREE_CHAIN (link);
6378 TREE_CHAIN (link) = NULL_TREE;
6379 vec_safe_push (clobbers, link);
6382 link_next = NULL_TREE;
6383 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6385 link_next = TREE_CHAIN (link);
6386 TREE_CHAIN (link) = NULL_TREE;
6387 vec_safe_push (labels, link);
6390 /* Do not add ASMs with errors to the gimple IL stream. */
6391 if (ret != GS_ERROR)
6393 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6394 inputs, outputs, clobbers, labels);
6396 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6397 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6398 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6400 gimplify_seq_add_stmt (pre_p, stmt);
6403 return ret;
6406 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6407 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6408 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6409 return to this function.
6411 FIXME should we complexify the prequeue handling instead? Or use flags
6412 for all the cleanups and let the optimizer tighten them up? The current
6413 code seems pretty fragile; it will break on a cleanup within any
6414 non-conditional nesting. But any such nesting would be broken, anyway;
6415 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6416 and continues out of it. We can do that at the RTL level, though, so
6417 having an optimizer to tighten up try/finally regions would be a Good
6418 Thing. */
6420 static enum gimplify_status
6421 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6423 gimple_stmt_iterator iter;
6424 gimple_seq body_sequence = NULL;
/* If the CLEANUP_POINT_EXPR produces a value, TEMP is a temporary that
   will hold it; otherwise TEMP is NULL_TREE.  */
6426 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6428 /* We only care about the number of conditions between the innermost
6429 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6430 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6431 int old_conds = gimplify_ctxp->conditions;
6432 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6433 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6434 gimplify_ctxp->conditions = 0;
6435 gimplify_ctxp->conditional_cleanups = NULL;
6436 gimplify_ctxp->in_cleanup_point_expr = true;
/* Gimplify the wrapped body; WITH_CLEANUP_EXPR markers collected while
   doing so end up inside BODY_SEQUENCE.  */
6438 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
/* Restore the saved conditional-context state.  */
6440 gimplify_ctxp->conditions = old_conds;
6441 gimplify_ctxp->conditional_cleanups = old_cleanups;
6442 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Walk the gimplified body and convert each GIMPLE_WITH_CLEANUP_EXPR
   marker into a GIMPLE_TRY covering the statements that follow it.  */
6444 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6446 gimple *wce = gsi_stmt (iter);
6448 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
/* A cleanup marker with nothing after it: no TRY is needed.  Emit the
   cleanup inline (unless it only applies on the EH path) and drop the
   marker.  */
6450 if (gsi_one_before_end_p (iter))
6452 /* Note that gsi_insert_seq_before and gsi_remove do not
6453 scan operands, unlike some other sequence mutators. */
6454 if (!gimple_wce_cleanup_eh_only (wce))
6455 gsi_insert_seq_before_without_update (&iter,
6456 gimple_wce_cleanup (wce),
6457 GSI_SAME_STMT);
6458 gsi_remove (&iter, true);
6459 break;
6461 else
6463 gtry *gtry;
6464 gimple_seq seq;
6465 enum gimple_try_flags kind;
/* EH-only cleanups become TRY_CATCH, ordinary ones TRY_FINALLY.  */
6467 if (gimple_wce_cleanup_eh_only (wce))
6468 kind = GIMPLE_TRY_CATCH;
6469 else
6470 kind = GIMPLE_TRY_FINALLY;
/* Everything after the marker becomes the protected body.  */
6471 seq = gsi_split_seq_after (iter);
6473 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6474 /* Do not use gsi_replace here, as it may scan operands.
6475 We want to do a simple structural modification only. */
6476 gsi_set_stmt (&iter, gtry);
/* Continue scanning inside the new TRY body for further markers.  */
6477 iter = gsi_start (gtry->eval);
6480 else
6481 gsi_next (&iter);
6484 gimplify_seq_add_seq (pre_p, body_sequence);
/* Hand back the value temporary if there was one, else signal that the
   expression has been fully consumed.  */
6485 if (temp)
6487 *expr_p = temp;
6488 return GS_OK;
6490 else
6492 *expr_p = NULL;
6493 return GS_ALL_DONE;
6497 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6498 is the cleanup action required. EH_ONLY is true if the cleanup should
6499 only be executed if an exception is thrown, not on normal exit.
6500 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6501 only valid for clobbers. */
6503 static void
6504 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6505 bool force_uncond = false)
6507 gimple *wce;
6508 gimple_seq cleanup_stmts = NULL;
6510 /* Errors can result in improperly nested cleanups. Which results in
6511 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6512 if (seen_error ())
6513 return;
6515 if (gimple_conditional_context ())
6517 /* If we're in a conditional context, this is more complex. We only
6518 want to run the cleanup if we actually ran the initialization that
6519 necessitates it, but we want to run it after the end of the
6520 conditional context. So we wrap the try/finally around the
6521 condition and use a flag to determine whether or not to actually
6522 run the destructor. Thus
6524 test ? f(A()) : 0
6526 becomes (approximately)
6528 flag = 0;
6529 try {
6530 if (test) { A::A(temp); flag = 1; val = f(temp); }
6531 else { val = 0; }
6532 } finally {
6533 if (flag) A::~A(temp);
/* FORCE_UNCOND (clobbers only, per the header comment) skips the flag
   dance: the cleanup is queued unconditionally on the conditional
   cleanup sequence.  */
6537 if (force_uncond)
6539 gimplify_stmt (&cleanup, &cleanup_stmts);
6540 wce = gimple_build_wce (cleanup_stmts);
6541 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6543 else
/* Materialize the flag: FFALSE initializes it to false before the
   TRY region; FTRUE (emitted into *PRE_P) sets it once the guarded
   initialization has actually run.  */
6545 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6546 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6547 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
/* Guard the cleanup itself on the flag.  */
6549 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6550 gimplify_stmt (&cleanup, &cleanup_stmts);
6551 wce = gimple_build_wce (cleanup_stmts);
6553 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6554 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6555 gimplify_seq_add_stmt (pre_p, ftrue);
6557 /* Because of this manipulation, and the EH edges that jump
6558 threading cannot redirect, the temporary (VAR) will appear
6559 to be used uninitialized. Don't warn. */
6560 TREE_NO_WARNING (var) = 1;
6563 else
/* Not in a conditional context: emit the WITH_CLEANUP_EXPR marker
   straight into *PRE_P; gimplify_cleanup_point_expr will later turn
   it into a GIMPLE_TRY.  */
6565 gimplify_stmt (&cleanup, &cleanup_stmts);
6566 wce = gimple_build_wce (cleanup_stmts);
6567 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6568 gimplify_seq_add_stmt (pre_p, wce);
6572 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6574 static enum gimplify_status
6575 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6577 tree targ = *expr_p;
6578 tree temp = TARGET_EXPR_SLOT (targ);
6579 tree init = TARGET_EXPR_INITIAL (targ);
6580 enum gimplify_status ret;
6582 bool unpoison_empty_seq = false;
6583 gimple_stmt_iterator unpoison_it;
6585 if (init)
6587 tree cleanup = NULL_TREE;
6589 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6590 to the temps list. Handle also variable length TARGET_EXPRs. */
6591 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6593 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6594 gimplify_type_sizes (TREE_TYPE (temp), pre_p)
6595 gimplify_vla_decl (temp, pre_p);
6597 else
6599 /* Save location where we need to place unpoisoning. It's possible
6600 that a variable will be converted to needs_to_live_in_memory. */
6601 unpoison_it = gsi_last (*pre_p);
6602 unpoison_empty_seq = gsi_end_p (unpoison_it);
6604 gimple_add_tmp_var (temp);
6607 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6608 expression is supposed to initialize the slot. */
6609 if (VOID_TYPE_P (TREE_TYPE (init)))
6610 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6611 else
/* Otherwise wrap the initializer in an INIT_EXPR storing into TEMP.  */
6613 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6614 init = init_expr;
6615 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6616 init = NULL;
6617 ggc_free (init_expr);
6619 if (ret == GS_ERROR)
6621 /* PR c++/28266 Make sure this is expanded only once. */
6622 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6623 return GS_ERROR;
6625 if (init)
6626 gimplify_and_add (init, pre_p);
6628 /* If needed, push the cleanup for the temp. */
6629 if (TARGET_EXPR_CLEANUP (targ))
/* NOTE(review): EH-only cleanups are pushed immediately, while the
   ordinary cleanup is deferred and pushed only after the clobber/ASAN
   cleanups below — presumably to get the desired relative ordering of
   the resulting TRY regions; confirm against gimple_push_cleanup.  */
6631 if (CLEANUP_EH_ONLY (targ))
6632 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6633 CLEANUP_EH_ONLY (targ), pre_p);
6634 else
6635 cleanup = TARGET_EXPR_CLEANUP (targ);
6638 /* Add a clobber for the temporary going out of scope, like
6639 gimplify_bind_expr. */
6640 if (gimplify_ctxp->in_cleanup_point_expr
6641 && needs_to_live_in_memory (temp))
6643 if (flag_stack_reuse == SR_ALL)
6645 tree clobber = build_clobber (TREE_TYPE (temp));
6646 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6647 gimple_push_cleanup (temp, clobber, false, pre_p, true);
/* With ASan use-after-scope checking, poison the slot at end of scope
   and unpoison it at the saved insertion point before first use.  */
6649 if (asan_poisoned_variables
6650 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6651 && dbg_cnt (asan_use_after_scope)
6652 && !gimplify_omp_ctxp)
6654 tree asan_cleanup = build_asan_poison_call_expr (temp);
6655 if (asan_cleanup)
6657 if (unpoison_empty_seq)
6658 unpoison_it = gsi_start (*pre_p);
6660 asan_poison_variable (temp, false, &unpoison_it,
6661 unpoison_empty_seq);
6662 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6666 if (cleanup)
6667 gimple_push_cleanup (temp, cleanup, false, pre_p);
6669 /* Only expand this once. */
6670 TREE_OPERAND (targ, 3) = init;
6671 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6673 else
6674 /* We should have expanded this before. */
6675 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
/* The TARGET_EXPR is replaced by its slot variable.  */
6677 *expr_p = temp;
6678 return GS_OK;
6681 /* Gimplification of expression trees. */
6683 /* Gimplify an expression which appears at statement context. The
6684 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6685 NULL, a new sequence is allocated.
6687 Return true if we actually added a statement to the queue. */
6689 bool
6690 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6692 gimple_seq_node last;
6694 last = gimple_seq_last (*seq_p);
6695 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6696 return last != gimple_seq_last (*seq_p);
6699 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6700 to CTX. If entries already exist, force them to be some flavor of private.
6701 If there is no enclosing parallel, do nothing. */
6703 void
6704 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6706 splay_tree_node n;
6708 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6709 return;
/* Walk DECL through CTX and each enclosing context in turn (loop is
   closed by the while at the end of the function).  */
6713 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6714 if (n != NULL)
/* An existing entry is forced to a private flavor: SHARED becomes
   FIRSTPRIVATE (keeping the SEEN bit), a MAP becomes to-only.  Any
   other existing class stops the walk.  */
6716 if (n->value & GOVD_SHARED)
6717 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6718 else if (n->value & GOVD_MAP)
6719 n->value |= GOVD_MAP_TO_ONLY;
6720 else
6721 return;
/* In a target region, add a to-only map, except that scalars may be
   firstprivatized when defaultmap says so.  */
6723 else if ((ctx->region_type & ORT_TARGET) != 0)
6725 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6726 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE)
6727 else
6728 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
/* Only regions that actually create a data environment (not worksharing,
   taskgroup, simd, acc, or target-data regions) get a new
   FIRSTPRIVATE entry.  */
6730 else if (ctx->region_type != ORT_WORKSHARE
6731 && ctx->region_type != ORT_TASKGROUP
6732 && ctx->region_type != ORT_SIMD
6733 && ctx->region_type != ORT_ACC
6734 && !(ctx->region_type & ORT_TARGET_DATA))
6735 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6737 ctx = ctx->outer_context;
6739 while (ctx);
6742 /* Similarly for each of the type sizes of TYPE. */
6744 static void
6745 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6747 if (type == NULL || type == error_mark_node)
6748 return;
6749 type = TYPE_MAIN_VARIANT (type);
/* Each type is processed at most once per context.  */
6751 if (ctx->privatized_types->add (type))
6752 return;
6754 switch (TREE_CODE (type))
/* Scalar types: firstprivatize the (possibly non-constant) bounds.  */
6756 case INTEGER_TYPE:
6757 case ENUMERAL_TYPE:
6758 case BOOLEAN_TYPE:
6759 case REAL_TYPE:
6760 case FIXED_POINT_TYPE:
6761 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6762 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6763 break;
/* Arrays: recurse into the element type and the index domain.  */
6765 case ARRAY_TYPE:
6766 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6767 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6768 break;
/* Aggregates: handle each field's offset and type.  */
6770 case RECORD_TYPE:
6771 case UNION_TYPE:
6772 case QUAL_UNION_TYPE:
6774 tree field;
6775 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6776 if (TREE_CODE (field) == FIELD_DECL)
6778 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6779 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6782 break;
/* Pointers and references: recurse into the pointed-to type.  */
6784 case POINTER_TYPE:
6785 case REFERENCE_TYPE:
6786 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6787 break;
6789 default:
6790 break;
/* Finally handle the overall size expressions and let the frontend
   do any language-specific work.  */
6793 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6794 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6795 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6798 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6800 static void
6801 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6803 splay_tree_node n;
6804 unsigned int nflags;
6805 tree t;
6807 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6808 return;
6810 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6811 there are constructors involved somewhere. Exception is a shared clause,
6812 there is nothing privatized in that case. */
6813 if ((flags & GOVD_SHARED) == 0
6814 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6815 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6816 flags |= GOVD_SEEN;
6818 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6819 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6821 /* We shouldn't be re-adding the decl with the same data
6822 sharing class. */
6823 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6824 nflags = n->value | flags;
6825 /* The only combination of data sharing classes we should see is
6826 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6827 reduction variables to be used in data sharing clauses. */
6828 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6829 || ((nflags & GOVD_DATA_SHARE_CLASS)
6830 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6831 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6832 n->value = nflags;
6833 return;
6836 /* When adding a variable-sized variable, we have to handle all sorts
6837 of additional bits of data: the pointer replacement variable, and
6838 the parameters of the type. */
6839 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6841 /* Add the pointer replacement variable as PRIVATE if the variable
6842 replacement is private, else FIRSTPRIVATE since we'll need the
6843 address of the original variable either for SHARED, or for the
6844 copy into or out of the context. */
6845 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6847 if (flags & GOVD_MAP)
6848 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6849 else if (flags & GOVD_PRIVATE)
6850 nflags = GOVD_PRIVATE;
6851 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6852 && (flags & GOVD_FIRSTPRIVATE))
6853 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6854 else
6855 nflags = GOVD_FIRSTPRIVATE;
6856 nflags |= flags & GOVD_SEEN;
/* The VLA's DECL_VALUE_EXPR is *ptr; record the pointer itself.  */
6857 t = DECL_VALUE_EXPR (decl);
6858 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6859 t = TREE_OPERAND (t, 0);
6860 gcc_assert (DECL_P (t));
6861 omp_add_variable (ctx, t, nflags);
6864 /* Add all of the variable and type parameters (which should have
6865 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6866 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6867 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6868 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6870 /* The variable-sized variable itself is never SHARED, only some form
6871 of PRIVATE. The sharing would take place via the pointer variable
6872 which we remapped above. */
6873 if (flags & GOVD_SHARED)
6874 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6875 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6877 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6878 alloca statement we generate for the variable, so make sure it
6879 is available. This isn't automatically needed for the SHARED
6880 case, since we won't be allocating local storage then.
6881 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6882 in this case omp_notice_variable will be called later
6883 on when it is gimplified. */
6884 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6885 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6886 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
/* Privatized references also need the size of the referenced type.  */
6888 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6889 && lang_hooks.decls.omp_privatize_by_reference (decl))
6891 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6893 /* Similar to the direct variable sized case above, we'll need the
6894 size of references being privatized. */
6895 if ((flags & GOVD_SHARED) == 0)
6897 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6898 if (DECL_P (t))
6899 omp_notice_variable (ctx, t, true);
/* Finally record FLAGS for DECL itself in this context.  */
6903 if (n != NULL)
6904 n->value |= flags;
6905 else
6906 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6908 /* For reductions clauses in OpenACC loop directives, by default create a
6909 copy clause on the enclosing parallel construct for carrying back the
6910 results. */
6911 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6913 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6914 while (outer_ctx)
6916 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6917 if (n != NULL)
6919 /* Ignore local variables and explicitly declared clauses. */
6920 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6921 break;
6922 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6924 /* According to the OpenACC spec, such a reduction variable
6925 should already have a copy map on a kernels construct,
6926 verify that here. */
6927 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6928 && (n->value & GOVD_MAP));
6930 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6932 /* Remove firstprivate and make it a copy map. */
6933 n->value &= ~GOVD_FIRSTPRIVATE;
6934 n->value |= GOVD_MAP;
/* No entry yet on the enclosing parallel: create the copy map.  */
6937 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6939 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6940 GOVD_MAP | GOVD_SEEN);
6941 break;
6943 outer_ctx = outer_ctx->outer_context;
6948 /* Notice a threadprivate variable DECL used in OMP context CTX.
6949 This just prints out diagnostics about threadprivate variable uses
6950 in untied tasks. If DECL2 is non-NULL, prevent this warning
6951 on that variable. */
6953 static bool
6954 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6955 tree decl2)
6957 splay_tree_node n;
6958 struct gimplify_omp_ctx *octx;
/* Threadprivate variables may not be used in target regions at all;
   diagnose the first use and record DECL (and DECL2) with value 0 so
   the error is not repeated.  */
6960 for (octx = ctx; octx; octx = octx->outer_context)
6961 if ((octx->region_type & ORT_TARGET) != 0)
6963 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6964 if (n == NULL)
6966 error ("threadprivate variable %qE used in target region",
6967 DECL_NAME (decl));
6968 error_at (octx->location, "enclosing target region");
6969 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6971 if (decl2)
6972 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* Outside untied tasks there is nothing further to diagnose.  */
6975 if (ctx->region_type != ORT_UNTIED_TASK)
6976 return false;
6977 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6978 if (n == NULL)
6980 error ("threadprivate variable %qE used in untied task",
6981 DECL_NAME (decl));
6982 error_at (ctx->location, "enclosing task");
6983 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6985 if (decl2)
6986 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6987 return false;
6990 /* Return true if global var DECL is device resident. */
6992 static bool
6993 device_resident_p (tree decl)
6995 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6997 if (!attr)
6998 return false;
7000 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7002 tree c = TREE_VALUE (t);
7003 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7004 return true;
7007 return false;
7010 /* Return true if DECL has an ACC DECLARE attribute. */
7012 static bool
7013 is_oacc_declared (tree decl)
7015 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7016 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7017 return declared != NULL_TREE;
7020 /* Determine outer default flags for DECL mentioned in an OMP region
7021 but not declared in an enclosing clause.
7023 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7024 remapped firstprivate instead of shared. To some extent this is
7025 addressed in omp_firstprivatize_type_sizes, but not
7026 effectively. */
7028 static unsigned
7029 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7030 bool in_code, unsigned flags)
7032 enum omp_clause_default_kind default_kind = ctx->default_kind;
7033 enum omp_clause_default_kind kind;
/* A frontend-predetermined sharing overrides the region's default.  */
7035 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7036 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7037 default_kind = kind;
7039 switch (default_kind)
/* default(none): DECL had to be listed explicitly.  Diagnose, then
   fall through to SHARED so compilation can proceed.  */
7041 case OMP_CLAUSE_DEFAULT_NONE:
7043 const char *rtype;
7045 if (ctx->region_type & ORT_PARALLEL)
7046 rtype = "parallel";
7047 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7048 rtype = "taskloop";
7049 else if (ctx->region_type & ORT_TASK)
7050 rtype = "task";
7051 else if (ctx->region_type & ORT_TEAMS)
7052 rtype = "teams";
7053 else
7054 gcc_unreachable ();
7056 error ("%qE not specified in enclosing %qs",
7057 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7058 error_at (ctx->location, "enclosing %qs", rtype);
7060 /* FALLTHRU */
7061 case OMP_CLAUSE_DEFAULT_SHARED:
7062 flags |= GOVD_SHARED;
7063 break;
7064 case OMP_CLAUSE_DEFAULT_PRIVATE:
7065 flags |= GOVD_PRIVATE;
7066 break;
7067 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7068 flags |= GOVD_FIRSTPRIVATE;
7069 break;
7070 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7071 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7072 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
/* Search enclosing contexts: anything already non-SHARED there makes
   DECL firstprivate here; an enclosing parallel/teams makes it
   shared.  */
7073 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7075 omp_notice_variable (octx, decl, in_code);
7076 for (; octx; octx = octx->outer_context)
7078 splay_tree_node n2;
7080 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7081 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7082 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7083 continue;
7084 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7086 flags |= GOVD_FIRSTPRIVATE;
7087 goto found_outer;
7089 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7091 flags |= GOVD_SHARED;
7092 goto found_outer;
/* No enclosing context decided: parameters and function-local
   variables become firstprivate, everything else shared.  */
7097 if (TREE_CODE (decl) == PARM_DECL
7098 || (!is_global_var (decl)
7099 && DECL_CONTEXT (decl) == current_function_decl))
7100 flags |= GOVD_FIRSTPRIVATE;
7101 else
7102 flags |= GOVD_SHARED;
7103 found_outer:
7104 break;
7106 default:
7107 gcc_unreachable ();
7110 return flags;
7114 /* Determine outer default flags for DECL mentioned in an OACC region
7115 but not declared in an enclosing clause. */
7117 static unsigned
7118 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7120 const char *rkind;
7121 bool on_device = false;
7122 bool declared = is_oacc_declared (decl);
7123 tree type = TREE_TYPE (decl);
/* For by-reference privatization, classify based on the referenced
   type rather than the reference itself.  */
7125 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7126 type = TREE_TYPE (type);
/* A device-resident global needs only a 'to' mapping.  */
7128 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7129 && is_global_var (decl)
7130 && device_resident_p (decl))
7132 on_device = true;
7133 flags |= GOVD_MAP_TO_ONLY;
7136 switch (ctx->region_type)
7138 case ORT_ACC_KERNELS:
7139 rkind = "kernels";
7141 if (AGGREGATE_TYPE_P (type))
7143 /* Aggregates default to 'present_or_copy', or 'present'. */
7144 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7145 flags |= GOVD_MAP;
7146 else
7147 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7149 else
7150 /* Scalars default to 'copy'. */
7151 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7153 break;
7155 case ORT_ACC_PARALLEL:
7156 rkind = "parallel";
/* Device-resident or 'declare'd variables are simply mapped.  */
7158 if (on_device || declared)
7159 flags |= GOVD_MAP;
7160 else if (AGGREGATE_TYPE_P (type))
7162 /* Aggregates default to 'present_or_copy', or 'present'. */
7163 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7164 flags |= GOVD_MAP;
7165 else
7166 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7168 else
7169 /* Scalars default to 'firstprivate'. */
7170 flags |= GOVD_FIRSTPRIVATE;
7172 break;
7174 default:
7175 gcc_unreachable ();
/* Diagnose uses of non-artificial decls under default(none).  */
7178 if (DECL_ARTIFICIAL (decl))
7179 ; /* We can get compiler-generated decls, and should not complain
7180 about them. */
7181 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7183 error ("%qE not specified in enclosing OpenACC %qs construct",
7184 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7185 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7187 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7188 ; /* Handled above. */
7189 else
7190 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7192 return flags;
7195 /* Record the fact that DECL was used within the OMP context CTX.
7196 IN_CODE is true when real code uses DECL, and false when we should
7197 merely emit default(none) errors. Return true if DECL is going to
7198 be remapped and thus DECL shouldn't be gimplified into its
7199 DECL_VALUE_EXPR (if any). */
7201 static bool
7202 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7204 splay_tree_node n;
7205 unsigned flags = in_code ? GOVD_SEEN : 0;
7206 bool ret = false, shared;
7208 if (error_operand_p (decl))
7209 return false;
7211 if (ctx->region_type == ORT_NONE)
7212 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7214 if (is_global_var (decl))
7216 /* Threadprivate variables are predetermined. */
7217 if (DECL_THREAD_LOCAL_P (decl))
7218 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7220 if (DECL_HAS_VALUE_EXPR_P (decl))
7222 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7224 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7225 return omp_notice_threadprivate_variable (ctx, decl, value);
7228 if (gimplify_omp_ctxp->outer_context == NULL
7229 && VAR_P (decl)
7230 && oacc_get_fn_attrib (current_function_decl))
7232 location_t loc = DECL_SOURCE_LOCATION (decl);
7234 if (lookup_attribute ("omp declare target link",
7235 DECL_ATTRIBUTES (decl)))
7237 error_at (loc,
7238 "%qE with %<link%> clause used in %<routine%> function",
7239 DECL_NAME (decl));
7240 return false;
7242 else if (!lookup_attribute ("omp declare target",
7243 DECL_ATTRIBUTES (decl)))
7245 error_at (loc,
7246 "%qE requires a %<declare%> directive for use "
7247 "in a %<routine%> function", DECL_NAME (decl));
7248 return false;
7253 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7254 if ((ctx->region_type & ORT_TARGET) != 0)
7256 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7257 if (n == NULL)
7259 unsigned nflags = flags;
7260 if ((ctx->region_type & ORT_ACC) == 0)
7262 bool is_declare_target = false;
7263 if (is_global_var (decl)
7264 && varpool_node::get_create (decl)->offloadable)
7266 struct gimplify_omp_ctx *octx;
7267 for (octx = ctx->outer_context;
7268 octx; octx = octx->outer_context)
7270 n = splay_tree_lookup (octx->variables,
7271 (splay_tree_key)decl);
7272 if (n
7273 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7274 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7275 break;
7277 is_declare_target = octx == NULL;
7279 if (!is_declare_target)
7281 int gdmk;
7282 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7283 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7284 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7285 == POINTER_TYPE)))
7286 gdmk = GDMK_POINTER;
7287 else if (lang_hooks.decls.omp_scalar_p (decl))
7288 gdmk = GDMK_SCALAR;
7289 else
7290 gdmk = GDMK_AGGREGATE;
7291 if (ctx->defaultmap[gdmk] == 0)
7293 tree d = lang_hooks.decls.omp_report_decl (decl);
7294 error ("%qE not specified in enclosing %<target%>",
7295 DECL_NAME (d));
7296 error_at (ctx->location, "enclosing %<target%>");
7298 else if (ctx->defaultmap[gdmk]
7299 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7300 nflags |= ctx->defaultmap[gdmk];
7301 else
7303 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7304 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7309 struct gimplify_omp_ctx *octx = ctx->outer_context;
7310 if ((ctx->region_type & ORT_ACC) && octx)
7312 /* Look in outer OpenACC contexts, to see if there's a
7313 data attribute for this variable. */
7314 omp_notice_variable (octx, decl, in_code);
7316 for (; octx; octx = octx->outer_context)
7318 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7319 break;
7320 splay_tree_node n2
7321 = splay_tree_lookup (octx->variables,
7322 (splay_tree_key) decl);
7323 if (n2)
7325 if (octx->region_type == ORT_ACC_HOST_DATA)
7326 error ("variable %qE declared in enclosing "
7327 "%<host_data%> region", DECL_NAME (decl));
7328 nflags |= GOVD_MAP;
7329 if (octx->region_type == ORT_ACC_DATA
7330 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7331 nflags |= GOVD_MAP_0LEN_ARRAY;
7332 goto found_outer;
7337 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7338 | GOVD_MAP_ALLOC_ONLY)) == flags)
7340 tree type = TREE_TYPE (decl);
7342 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7343 && lang_hooks.decls.omp_privatize_by_reference (decl))
7344 type = TREE_TYPE (type);
7345 if (!lang_hooks.types.omp_mappable_type (type))
7347 error ("%qD referenced in target region does not have "
7348 "a mappable type", decl);
7349 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7351 else
7353 if ((ctx->region_type & ORT_ACC) != 0)
7354 nflags = oacc_default_clause (ctx, decl, flags);
7355 else
7356 nflags |= GOVD_MAP;
7359 found_outer:
7360 omp_add_variable (ctx, decl, nflags);
7362 else
7364 /* If nothing changed, there's nothing left to do. */
7365 if ((n->value & flags) == flags)
7366 return ret;
7367 flags |= n->value;
7368 n->value = flags;
7370 goto do_outer;
7373 if (n == NULL)
7375 if (ctx->region_type == ORT_WORKSHARE
7376 || ctx->region_type == ORT_TASKGROUP
7377 || ctx->region_type == ORT_SIMD
7378 || ctx->region_type == ORT_ACC
7379 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7380 goto do_outer;
7382 flags = omp_default_clause (ctx, decl, in_code, flags);
7384 if ((flags & GOVD_PRIVATE)
7385 && lang_hooks.decls.omp_private_outer_ref (decl))
7386 flags |= GOVD_PRIVATE_OUTER_REF;
7388 omp_add_variable (ctx, decl, flags);
7390 shared = (flags & GOVD_SHARED) != 0;
7391 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7392 goto do_outer;
7395 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7396 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7397 && DECL_SIZE (decl))
7399 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7401 splay_tree_node n2;
7402 tree t = DECL_VALUE_EXPR (decl);
7403 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7404 t = TREE_OPERAND (t, 0);
7405 gcc_assert (DECL_P (t));
7406 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7407 n2->value |= GOVD_SEEN;
7409 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7410 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7411 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7412 != INTEGER_CST))
7414 splay_tree_node n2;
7415 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7416 gcc_assert (DECL_P (t));
7417 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7418 if (n2)
7419 omp_notice_variable (ctx, t, true);
7423 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7424 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7426 /* If nothing changed, there's nothing left to do. */
7427 if ((n->value & flags) == flags)
7428 return ret;
7429 flags |= n->value;
7430 n->value = flags;
7432 do_outer:
7433 /* If the variable is private in the current context, then we don't
7434 need to propagate anything to an outer context. */
7435 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7436 return ret;
7437 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7438 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7439 return ret;
7440 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7441 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7442 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7443 return ret;
7444 if (ctx->outer_context
7445 && omp_notice_variable (ctx->outer_context, decl, in_code))
7446 return true;
7447 return ret;
7450 /* Verify that DECL is private within CTX. If there's specific information
7451 to the contrary in the innermost scope, generate an error. */
/* SIMD encodes the kind of construct being checked: nonzero for
   simd-related constructs, and the specific value 1 additionally
   suppresses the "should not be linear" diagnostic below.
   NOTE(review): exact caller encoding not visible here -- confirm.  */
7453 static bool
7454 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7456 splay_tree_node n;
/* Look DECL up in this context's data-sharing table.  */
7458 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7459 if (n != NULL)
7461 if (n->value & GOVD_SHARED)
/* A shared iteration variable is invalid; diagnose only in the
   innermost (current) gimplification context, then force it
   private so the error is not emitted again.  */
7463 if (ctx == gimplify_omp_ctxp)
7465 if (simd)
7466 error ("iteration variable %qE is predetermined linear",
7467 DECL_NAME (decl));
7468 else
7469 error ("iteration variable %qE should be private",
7470 DECL_NAME (decl));
7471 n->value = GOVD_PRIVATE;
7472 return true;
7474 else
7475 return false;
/* DECL appears on an explicit clause: reject clause kinds that are
   invalid for an iteration variable.  Only diagnose in the current
   context or the parallel of a combined parallel construct.  */
7477 else if ((n->value & GOVD_EXPLICIT) != 0
7478 && (ctx == gimplify_omp_ctxp
7479 || (ctx->region_type == ORT_COMBINED_PARALLEL
7480 && gimplify_omp_ctxp->outer_context == ctx)))
7482 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7483 error ("iteration variable %qE should not be firstprivate",
7484 DECL_NAME (decl));
7485 else if ((n->value & GOVD_REDUCTION) != 0)
7486 error ("iteration variable %qE should not be reduction",
7487 DECL_NAME (decl));
7488 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7489 error ("iteration variable %qE should not be linear",
7490 DECL_NAME (decl));
/* Consider DECL private in the innermost context and in the
   parallel of a combined parallel construct.  */
7492 return (ctx == gimplify_omp_ctxp
7493 || (ctx->region_type == ORT_COMBINED_PARALLEL
7494 && gimplify_omp_ctxp->outer_context == ctx));
/* DECL has no record here: only worksharing-like regions are
   transparent -- recurse outward through those; any other region
   type binds DECL, so it is not private.  */
7497 if (ctx->region_type != ORT_WORKSHARE
7498 && ctx->region_type != ORT_TASKGROUP
7499 && ctx->region_type != ORT_SIMD
7500 && ctx->region_type != ORT_ACC)
7501 return false;
7502 else if (ctx->outer_context)
7503 return omp_is_private (ctx->outer_context, decl, simd);
7504 return false;
7507 /* Return true if DECL is private within a parallel region
7508 that binds to the current construct's context or in parallel
7509 region's REDUCTION clause. */
7511 static bool
7512 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7514 splay_tree_node n;
/* Walk outward through enclosing contexts; the do/while condition at
   the bottom keeps iterating only through worksharing-like regions
   (workshare/taskgroup/simd/acc), which do not bind DECL.  */
7518 ctx = ctx->outer_context;
7519 if (ctx == NULL)
/* Ran out of enclosing contexts without finding a binding region.  */
7521 if (is_global_var (decl))
7522 return false;
7524 /* References might be private, but might be shared too,
7525 when checking for copyprivate, assume they might be
7526 private, otherwise assume they might be shared. */
7527 if (copyprivate)
7528 return true;
7530 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7531 return false;
7533 /* Treat C++ privatized non-static data members outside
7534 of the privatization the same. */
7535 if (omp_member_access_dummy_var (decl))
7536 return false;
7538 return true;
/* Consult this context's record for DECL.  */
7541 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* Target regions with no data-sharing class recorded for DECL are
   transparent for this check -- keep walking outward.  */
7543 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7544 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7545 continue;
7547 if (n != NULL)
/* A GOVD_LOCAL dummy var for C++ member access is not private in
   the sense this predicate reports.  */
7549 if ((n->value & GOVD_LOCAL) != 0
7550 && omp_member_access_dummy_var (decl))
7551 return false;
/* Private iff not shared in the context that records DECL.  */
7552 return (n->value & GOVD_SHARED) == 0;
7555 while (ctx->region_type == ORT_WORKSHARE
7556 || ctx->region_type == ORT_TASKGROUP
7557 || ctx->region_type == ORT_SIMD
7558 || ctx->region_type == ORT_ACC);
7559 return false;
7562 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7564 static tree
7565 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7567 tree t = *tp;
7569 /* If this node has been visited, unmark it and keep looking. */
7570 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7571 return t;
7573 if (IS_TYPE_OR_DECL_P (t))
7574 *walk_subtrees = 0;
7575 return NULL_TREE;
7578 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7579 lower all the depend clauses by populating corresponding depend
7580 array. Returns 0 if there are no such depend clauses, or
7581 2 if all depend clauses should be removed, 1 otherwise. */
7583 static int
7584 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7586 tree c;
7587 gimple *g;
/* Per-kind count of plain (non-iterator) depend clauses.  */
7588 size_t n[4] = { 0, 0, 0, 0 };
7589 bool unused[4];
/* Per-kind runtime count of depend addresses (trees in sizetype).  */
7590 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7591 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7592 size_t i, j;
7593 location_t first_loc = UNKNOWN_LOCATION;
/* First pass: bucket depend clauses by kind -- I is 0 for out/inout,
   1 for mutexinoutset, 2 for in, 3 for depobj -- counting plain
   clauses in N[] and accumulating iterator trip counts in COUNTS[].  */
7595 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7596 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7598 switch (OMP_CLAUSE_DEPEND_KIND (c))
7600 case OMP_CLAUSE_DEPEND_IN:
7601 i = 2;
7602 break;
7603 case OMP_CLAUSE_DEPEND_OUT:
7604 case OMP_CLAUSE_DEPEND_INOUT:
7605 i = 0;
7606 break;
7607 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7608 i = 1;
7609 break;
7610 case OMP_CLAUSE_DEPEND_DEPOBJ:
7611 i = 3;
7612 break;
7613 case OMP_CLAUSE_DEPEND_SOURCE:
7614 case OMP_CLAUSE_DEPEND_SINK:
7615 continue;
7616 default:
7617 gcc_unreachable ();
7619 tree t = OMP_CLAUSE_DECL (c);
7620 if (first_loc == UNKNOWN_LOCATION)
7621 first_loc = OMP_CLAUSE_LOCATION (c);
/* An iterator-modified clause is a TREE_LIST whose TREE_PURPOSE is a
   TREE_VEC of (var, begin, end, step, orig_step, block) tuples.  */
7622 if (TREE_CODE (t) == TREE_LIST
7623 && TREE_PURPOSE (t)
7624 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
/* Consecutive clauses sharing one iterator share its trip count;
   only recompute when the iterator changes.  */
7626 if (TREE_PURPOSE (t) != last_iter)
7628 tree tcnt = size_one_node;
7630 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7631 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7632 is_gimple_val, fb_rvalue) == GS_ERROR
7633 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7634 is_gimple_val, fb_rvalue) == GS_ERROR
7635 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7636 is_gimple_val, fb_rvalue) == GS_ERROR
7637 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7638 is_gimple_val, fb_rvalue)
7639 == GS_ERROR))
7640 return 2;
7641 tree var = TREE_VEC_ELT (it, 0);
7642 tree begin = TREE_VEC_ELT (it, 1);
7643 tree end = TREE_VEC_ELT (it, 2);
7644 tree step = TREE_VEC_ELT (it, 3);
7645 tree orig_step = TREE_VEC_ELT (it, 4);
7646 tree type = TREE_TYPE (var);
7647 tree stype = TREE_TYPE (step);
7648 location_t loc = DECL_SOURCE_LOCATION (var);
7649 tree endmbegin;
7650 /* Compute count for this iterator as
7651 orig_step > 0
7652 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7653 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7654 and compute product of those for the entire depend
7655 clause. */
7656 if (POINTER_TYPE_P (type))
7657 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7658 stype, end, begin);
7659 else
7660 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7661 end, begin);
7662 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7663 step,
7664 build_int_cst (stype, 1));
7665 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7666 build_int_cst (stype, 1));
7667 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7668 unshare_expr (endmbegin),
7669 stepm1);
7670 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7671 pos, step);
7672 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7673 endmbegin, stepp1);
/* For unsigned step types negate both operands so the TRUNC_DIV
   below divides nonnegative quantities.  */
7674 if (TYPE_UNSIGNED (stype))
7676 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7677 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7679 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7680 neg, step);
7681 step = NULL_TREE;
7682 tree cond = fold_build2_loc (loc, LT_EXPR,
7683 boolean_type_node,
7684 begin, end);
7685 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7686 build_int_cst (stype, 0));
7687 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7688 end, begin);
7689 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7690 build_int_cst (stype, 0));
7691 tree osteptype = TREE_TYPE (orig_step);
7692 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7693 orig_step,
7694 build_int_cst (osteptype, 0));
7695 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7696 cond, pos, neg);
7697 cnt = fold_convert_loc (loc, sizetype, cnt);
7698 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7699 fb_rvalue) == GS_ERROR)
7700 return 2;
7701 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7703 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7704 fb_rvalue) == GS_ERROR)
7705 return 2;
7706 last_iter = TREE_PURPOSE (t);
7707 last_count = tcnt;
7709 if (counts[i] == NULL_TREE)
7710 counts[i] = last_count;
7711 else
7712 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7713 PLUS_EXPR, counts[i], last_count);
7715 else
7716 n[i]++;
/* No iterator-modified clause in any bucket -> nothing to lower.  */
7718 for (i = 0; i < 4; i++)
7719 if (counts[i])
7720 break;
7721 if (i == 4)
7722 return 0;
/* Fold plain-clause counts into COUNTS[] and sum all buckets into the
   total number of depend addresses.  */
7724 tree total = size_zero_node;
7725 for (i = 0; i < 4; i++)
7727 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7728 if (counts[i] == NULL_TREE)
7729 counts[i] = size_zero_node;
7730 if (n[i])
7731 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7732 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7733 fb_rvalue) == GS_ERROR)
7734 return 2;
7735 total = size_binop (PLUS_EXPR, total, counts[i]);
7738 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7739 == GS_ERROR)
7740 return 2;
/* IS_OLD selects the compact array layout used when the
   mutexinoutset and depobj buckets are both empty: a 2-element
   header instead of 5 (see the index arithmetic below).  */
7741 bool is_old = unused[1] && unused[3];
7742 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7743 size_int (is_old ? 1 : 4));
/* Build the depend array variable; it is a VLA when the total is not
   a compile-time constant.  */
7744 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7745 tree array = create_tmp_var_raw (type);
7746 TREE_ADDRESSABLE (array) = 1;
7747 if (TREE_CODE (totalpx) != INTEGER_CST)
7749 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7750 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7751 if (gimplify_omp_ctxp)
/* Register the VLA as local in the nearest context that binds
   variables (skip transparent worksharing-like regions).  */
7753 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7754 while (ctx
7755 && (ctx->region_type == ORT_WORKSHARE
7756 || ctx->region_type == ORT_TASKGROUP
7757 || ctx->region_type == ORT_SIMD
7758 || ctx->region_type == ORT_ACC))
7759 ctx = ctx->outer_context;
7760 if (ctx)
7761 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7763 gimplify_vla_decl (array, pre_p);
7765 else
7766 gimple_add_tmp_var (array);
/* Emit the array header: slot 0 is 0 (new layout only), then the
   total, then the per-kind counts (one for old layout, three for
   new).  */
7767 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7768 NULL_TREE);
7769 tree tem;
7770 if (!is_old)
7772 tem = build2 (MODIFY_EXPR, void_type_node, r,
7773 build_int_cst (ptr_type_node, 0));
7774 gimplify_and_add (tem, pre_p);
7775 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7776 NULL_TREE);
7778 tem = build2 (MODIFY_EXPR, void_type_node, r,
7779 fold_convert (ptr_type_node, total));
7780 gimplify_and_add (tem, pre_p);
7781 for (i = 1; i < (is_old ? 2 : 4); i++)
7783 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7784 NULL_TREE, NULL_TREE);
7785 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7786 gimplify_and_add (tem, pre_p);
/* CNTS[I] is the running array index at which kind I's next address
   is stored while the array is filled in the second pass; J is the
   last used bucket + 1.  */
7789 tree cnts[4];
7790 for (j = 4; j; j--)
7791 if (!unused[j - 1])
7792 break;
7793 for (i = 0; i < 4; i++)
7795 if (i && (i >= j || unused[i - 1]))
7797 cnts[i] = cnts[i - 1];
7798 continue;
7800 cnts[i] = create_tmp_var (sizetype);
7801 if (i == 0)
7802 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7803 else
7805 tree t;
7806 if (is_old)
7807 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7808 else
7809 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7810 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7811 == GS_ERROR)
7812 return 2;
7813 g = gimple_build_assign (cnts[i], t);
7815 gimple_seq_add_stmt (pre_p, g);
/* Second pass: emit code that stores each depend address into the
   array, materializing the recorded iterator loops for
   iterator-modified clauses.  */
7818 last_iter = NULL_TREE;
7819 tree last_bind = NULL_TREE;
7820 tree *last_body = NULL;
7821 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7822 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7824 switch (OMP_CLAUSE_DEPEND_KIND (c))
7826 case OMP_CLAUSE_DEPEND_IN:
7827 i = 2;
7828 break;
7829 case OMP_CLAUSE_DEPEND_OUT:
7830 case OMP_CLAUSE_DEPEND_INOUT:
7831 i = 0;
7832 break;
7833 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7834 i = 1;
7835 break;
7836 case OMP_CLAUSE_DEPEND_DEPOBJ:
7837 i = 3;
7838 break;
7839 case OMP_CLAUSE_DEPEND_SOURCE:
7840 case OMP_CLAUSE_DEPEND_SINK:
7841 continue;
7842 default:
7843 gcc_unreachable ();
7845 tree t = OMP_CLAUSE_DECL (c);
7846 if (TREE_CODE (t) == TREE_LIST
7847 && TREE_PURPOSE (t)
7848 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
/* New iterator: flush the previous BIND_EXPR and build the loop
   nest for this one.  */
7850 if (TREE_PURPOSE (t) != last_iter)
7852 if (last_bind)
7853 gimplify_and_add (last_bind, pre_p);
7854 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7855 last_bind = build3 (BIND_EXPR, void_type_node,
7856 BLOCK_VARS (block), NULL, block);
7857 TREE_SIDE_EFFECTS (last_bind) = 1;
7858 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7859 tree *p = &BIND_EXPR_BODY (last_bind);
7860 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7862 tree var = TREE_VEC_ELT (it, 0);
7863 tree begin = TREE_VEC_ELT (it, 1);
7864 tree end = TREE_VEC_ELT (it, 2);
7865 tree step = TREE_VEC_ELT (it, 3);
7866 tree orig_step = TREE_VEC_ELT (it, 4);
7867 tree type = TREE_TYPE (var);
7868 location_t loc = DECL_SOURCE_LOCATION (var);
7869 /* Emit:
7870 var = begin;
7871 goto cond_label;
7872 beg_label:
7874 var = var + step;
7875 cond_label:
7876 if (orig_step > 0) {
7877 if (var < end) goto beg_label;
7878 } else {
7879 if (var > end) goto beg_label;
7881 for each iterator, with inner iterators added to
7882 the ... above. */
7883 tree beg_label = create_artificial_label (loc);
7884 tree cond_label = NULL_TREE;
7885 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7886 var, begin);
7887 append_to_statement_list_force (tem, p);
7888 tem = build_and_jump (&cond_label);
7889 append_to_statement_list_force (tem, p);
7890 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7891 append_to_statement_list (tem, p);
7892 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7893 NULL_TREE, NULL_TREE);
7894 TREE_SIDE_EFFECTS (bind) = 1;
7895 SET_EXPR_LOCATION (bind, loc);
7896 append_to_statement_list_force (bind, p);
7897 if (POINTER_TYPE_P (type))
7898 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7899 var, fold_convert_loc (loc, sizetype,
7900 step));
7901 else
7902 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7903 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7904 var, tem);
7905 append_to_statement_list_force (tem, p);
7906 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
7907 append_to_statement_list (tem, p);
7908 tree cond = fold_build2_loc (loc, LT_EXPR,
7909 boolean_type_node,
7910 var, end);
7911 tree pos
7912 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7913 cond, build_and_jump (&beg_label),
7914 void_node);
7915 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7916 var, end);
7917 tree neg
7918 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7919 cond, build_and_jump (&beg_label),
7920 void_node);
7921 tree osteptype = TREE_TYPE (orig_step);
7922 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7923 orig_step,
7924 build_int_cst (osteptype, 0));
7925 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
7926 cond, pos, neg);
7927 append_to_statement_list_force (tem, p);
/* Descend: the next (inner) iterator's statements go into this
   iterator's inner BIND_EXPR body.  */
7928 p = &BIND_EXPR_BODY (bind);
7930 last_body = p;
7932 last_iter = TREE_PURPOSE (t);
7933 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7935 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
7936 0), last_body);
7937 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7939 if (error_operand_p (TREE_VALUE (t)))
7940 return 2;
/* Store &address into array[cnts[i]] and bump cnts[i], inside the
   innermost iterator loop body.  */
7941 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
7942 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7943 NULL_TREE, NULL_TREE);
7944 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7945 void_type_node, r, TREE_VALUE (t));
7946 append_to_statement_list_force (tem, last_body);
7947 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7948 void_type_node, cnts[i],
7949 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
7950 append_to_statement_list_force (tem, last_body);
7951 TREE_VALUE (t) = null_pointer_node;
7953 else
/* Plain (non-iterator) clause: store its address directly.  */
7955 if (last_bind)
7957 gimplify_and_add (last_bind, pre_p);
7958 last_bind = NULL_TREE;
7960 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7962 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7963 NULL, is_gimple_val, fb_rvalue);
7964 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7966 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7967 return 2;
7968 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7969 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7970 is_gimple_val, fb_rvalue) == GS_ERROR)
7971 return 2;
7972 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7973 NULL_TREE, NULL_TREE);
7974 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
7975 gimplify_and_add (tem, pre_p);
7976 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
7977 size_int (1)));
7978 gimple_seq_add_stmt (pre_p, g);
7981 if (last_bind)
7982 gimplify_and_add (last_bind, pre_p);
/* Emit a runtime consistency check: trap if any final fill position
   disagrees with the precomputed counts.  */
7983 tree cond = boolean_false_node;
7984 if (is_old)
7986 if (!unused[0])
7987 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
7988 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
7989 size_int (2)));
7990 if (!unused[2])
7991 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
7992 build2_loc (first_loc, NE_EXPR, boolean_type_node,
7993 cnts[2],
7994 size_binop_loc (first_loc, PLUS_EXPR,
7995 totalpx,
7996 size_int (1))));
7998 else
8000 tree prev = size_int (5);
8001 for (i = 0; i < 4; i++)
8003 if (unused[i])
8004 continue;
8005 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8006 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8007 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8008 cnts[i], unshare_expr (prev)));
8011 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8012 build_call_expr_loc (first_loc,
8013 builtin_decl_explicit (BUILT_IN_TRAP),
8014 0), void_node);
8015 gimplify_and_add (tem, pre_p);
/* Prepend a single OMP_CLAUSE_DEPEND_LAST clause pointing at the
   lowered array; the original depend clauses remain chained after it.  */
8016 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8017 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8018 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8019 OMP_CLAUSE_CHAIN (c) = *list_p;
8020 *list_p = c;
8021 return 1;
8024 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8025 and previous omp contexts. */
8027 static void
8028 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8029 enum omp_region_type region_type,
8030 enum tree_code code)
8032 struct gimplify_omp_ctx *ctx, *outer_ctx;
8033 tree c;
8034 hash_map<tree, tree> *struct_map_to_clause = NULL;
8035 tree *prev_list_p = NULL;
8036 int handled_depend_iterators = -1;
8037 int nowait = -1;
8039 ctx = new_omp_context (region_type);
8040 outer_ctx = ctx->outer_context;
8041 if (code == OMP_TARGET)
8043 if (!lang_GNU_Fortran ())
8044 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8045 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8047 if (!lang_GNU_Fortran ())
8048 switch (code)
8050 case OMP_TARGET:
8051 case OMP_TARGET_DATA:
8052 case OMP_TARGET_ENTER_DATA:
8053 case OMP_TARGET_EXIT_DATA:
8054 case OACC_DECLARE:
8055 case OACC_HOST_DATA:
8056 case OACC_PARALLEL:
8057 case OACC_KERNELS:
8058 ctx->target_firstprivatize_array_bases = true;
8059 default:
8060 break;
8063 while ((c = *list_p) != NULL)
8065 bool remove = false;
8066 bool notice_outer = true;
8067 const char *check_non_private = NULL;
8068 unsigned int flags;
8069 tree decl;
8071 switch (OMP_CLAUSE_CODE (c))
8073 case OMP_CLAUSE_PRIVATE:
8074 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8075 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8077 flags |= GOVD_PRIVATE_OUTER_REF;
8078 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8080 else
8081 notice_outer = false;
8082 goto do_add;
8083 case OMP_CLAUSE_SHARED:
8084 flags = GOVD_SHARED | GOVD_EXPLICIT;
8085 goto do_add;
8086 case OMP_CLAUSE_FIRSTPRIVATE:
8087 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8088 check_non_private = "firstprivate";
8089 goto do_add;
8090 case OMP_CLAUSE_LASTPRIVATE:
8091 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8092 switch (code)
8094 case OMP_DISTRIBUTE:
8095 error_at (OMP_CLAUSE_LOCATION (c),
8096 "conditional %<lastprivate%> clause on "
8097 "%<distribute%> construct");
8098 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8099 break;
8100 case OMP_TASKLOOP:
8101 error_at (OMP_CLAUSE_LOCATION (c),
8102 "conditional %<lastprivate%> clause on "
8103 "%<taskloop%> construct");
8104 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8105 break;
8106 default:
8107 break;
8109 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8110 check_non_private = "lastprivate";
8111 decl = OMP_CLAUSE_DECL (c);
8112 if (error_operand_p (decl))
8113 goto do_add;
8114 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8115 && !lang_hooks.decls.omp_scalar_p (decl))
8117 error_at (OMP_CLAUSE_LOCATION (c),
8118 "non-scalar variable %qD in conditional "
8119 "%<lastprivate%> clause", decl);
8120 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8122 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8123 sorry_at (OMP_CLAUSE_LOCATION (c),
8124 "%<conditional%> modifier on %<lastprivate%> clause "
8125 "not supported yet");
8126 if (outer_ctx
8127 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8128 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8129 == ORT_COMBINED_TEAMS))
8130 && splay_tree_lookup (outer_ctx->variables,
8131 (splay_tree_key) decl) == NULL)
8133 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8134 if (outer_ctx->outer_context)
8135 omp_notice_variable (outer_ctx->outer_context, decl, true);
8137 else if (outer_ctx
8138 && (outer_ctx->region_type & ORT_TASK) != 0
8139 && outer_ctx->combined_loop
8140 && splay_tree_lookup (outer_ctx->variables,
8141 (splay_tree_key) decl) == NULL)
8143 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8144 if (outer_ctx->outer_context)
8145 omp_notice_variable (outer_ctx->outer_context, decl, true);
8147 else if (outer_ctx
8148 && (outer_ctx->region_type == ORT_WORKSHARE
8149 || outer_ctx->region_type == ORT_ACC)
8150 && outer_ctx->combined_loop
8151 && splay_tree_lookup (outer_ctx->variables,
8152 (splay_tree_key) decl) == NULL
8153 && !omp_check_private (outer_ctx, decl, false))
8155 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8156 if (outer_ctx->outer_context
8157 && (outer_ctx->outer_context->region_type
8158 == ORT_COMBINED_PARALLEL)
8159 && splay_tree_lookup (outer_ctx->outer_context->variables,
8160 (splay_tree_key) decl) == NULL)
8162 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8163 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8164 if (octx->outer_context)
8166 octx = octx->outer_context;
8167 if (octx->region_type == ORT_WORKSHARE
8168 && octx->combined_loop
8169 && splay_tree_lookup (octx->variables,
8170 (splay_tree_key) decl) == NULL
8171 && !omp_check_private (octx, decl, false))
8173 omp_add_variable (octx, decl,
8174 GOVD_LASTPRIVATE | GOVD_SEEN);
8175 octx = octx->outer_context;
8176 if (octx
8177 && ((octx->region_type & ORT_COMBINED_TEAMS)
8178 == ORT_COMBINED_TEAMS)
8179 && (splay_tree_lookup (octx->variables,
8180 (splay_tree_key) decl)
8181 == NULL))
8183 omp_add_variable (octx, decl,
8184 GOVD_SHARED | GOVD_SEEN);
8185 octx = octx->outer_context;
8188 if (octx)
8189 omp_notice_variable (octx, decl, true);
8192 else if (outer_ctx->outer_context)
8193 omp_notice_variable (outer_ctx->outer_context, decl, true);
8195 goto do_add;
8196 case OMP_CLAUSE_REDUCTION:
8197 if (OMP_CLAUSE_REDUCTION_TASK (c))
8199 if (region_type == ORT_WORKSHARE)
8201 if (nowait == -1)
8202 nowait = omp_find_clause (*list_p,
8203 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8204 if (nowait
8205 && (outer_ctx == NULL
8206 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8208 error_at (OMP_CLAUSE_LOCATION (c),
8209 "%<task%> reduction modifier on a construct "
8210 "with a %<nowait%> clause");
8211 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8214 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8216 error_at (OMP_CLAUSE_LOCATION (c),
8217 "invalid %<task%> reduction modifier on construct "
8218 "other than %<parallel%>, %<for%> or %<sections%>");
8219 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8222 /* FALLTHRU */
8223 case OMP_CLAUSE_IN_REDUCTION:
8224 case OMP_CLAUSE_TASK_REDUCTION:
8225 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8226 /* OpenACC permits reductions on private variables. */
8227 if (!(region_type & ORT_ACC)
8228 /* taskgroup is actually not a worksharing region. */
8229 && code != OMP_TASKGROUP)
8230 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8231 decl = OMP_CLAUSE_DECL (c);
8232 if (TREE_CODE (decl) == MEM_REF)
8234 tree type = TREE_TYPE (decl);
8235 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8236 NULL, is_gimple_val, fb_rvalue, false)
8237 == GS_ERROR)
8239 remove = true;
8240 break;
8242 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8243 if (DECL_P (v))
8245 omp_firstprivatize_variable (ctx, v);
8246 omp_notice_variable (ctx, v, true);
8248 decl = TREE_OPERAND (decl, 0);
8249 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8251 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8252 NULL, is_gimple_val, fb_rvalue, false)
8253 == GS_ERROR)
8255 remove = true;
8256 break;
8258 v = TREE_OPERAND (decl, 1);
8259 if (DECL_P (v))
8261 omp_firstprivatize_variable (ctx, v);
8262 omp_notice_variable (ctx, v, true);
8264 decl = TREE_OPERAND (decl, 0);
8266 if (TREE_CODE (decl) == ADDR_EXPR
8267 || TREE_CODE (decl) == INDIRECT_REF)
8268 decl = TREE_OPERAND (decl, 0);
8270 goto do_add_decl;
8271 case OMP_CLAUSE_LINEAR:
8272 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8273 is_gimple_val, fb_rvalue) == GS_ERROR)
8275 remove = true;
8276 break;
8278 else
8280 if (code == OMP_SIMD
8281 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8283 struct gimplify_omp_ctx *octx = outer_ctx;
8284 if (octx
8285 && octx->region_type == ORT_WORKSHARE
8286 && octx->combined_loop
8287 && !octx->distribute)
8289 if (octx->outer_context
8290 && (octx->outer_context->region_type
8291 == ORT_COMBINED_PARALLEL))
8292 octx = octx->outer_context->outer_context;
8293 else
8294 octx = octx->outer_context;
8296 if (octx
8297 && octx->region_type == ORT_WORKSHARE
8298 && octx->combined_loop
8299 && octx->distribute)
8301 error_at (OMP_CLAUSE_LOCATION (c),
8302 "%<linear%> clause for variable other than "
8303 "loop iterator specified on construct "
8304 "combined with %<distribute%>");
8305 remove = true;
8306 break;
8309 /* For combined #pragma omp parallel for simd, need to put
8310 lastprivate and perhaps firstprivate too on the
8311 parallel. Similarly for #pragma omp for simd. */
8312 struct gimplify_omp_ctx *octx = outer_ctx;
8313 decl = NULL_TREE;
8316 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8317 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8318 break;
8319 decl = OMP_CLAUSE_DECL (c);
8320 if (error_operand_p (decl))
8322 decl = NULL_TREE;
8323 break;
8325 flags = GOVD_SEEN;
8326 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8327 flags |= GOVD_FIRSTPRIVATE;
8328 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8329 flags |= GOVD_LASTPRIVATE;
8330 if (octx
8331 && octx->region_type == ORT_WORKSHARE
8332 && octx->combined_loop)
8334 if (octx->outer_context
8335 && (octx->outer_context->region_type
8336 == ORT_COMBINED_PARALLEL))
8337 octx = octx->outer_context;
8338 else if (omp_check_private (octx, decl, false))
8339 break;
8341 else if (octx
8342 && (octx->region_type & ORT_TASK) != 0
8343 && octx->combined_loop)
8345 else if (octx
8346 && octx->region_type == ORT_COMBINED_PARALLEL
8347 && ctx->region_type == ORT_WORKSHARE
8348 && octx == outer_ctx)
8349 flags = GOVD_SEEN | GOVD_SHARED;
8350 else if (octx
8351 && ((octx->region_type & ORT_COMBINED_TEAMS)
8352 == ORT_COMBINED_TEAMS))
8353 flags = GOVD_SEEN | GOVD_SHARED;
8354 else if (octx
8355 && octx->region_type == ORT_COMBINED_TARGET)
8357 flags &= ~GOVD_LASTPRIVATE;
8358 if (flags == GOVD_SEEN)
8359 break;
8361 else
8362 break;
8363 splay_tree_node on
8364 = splay_tree_lookup (octx->variables,
8365 (splay_tree_key) decl);
8366 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8368 octx = NULL;
8369 break;
8371 omp_add_variable (octx, decl, flags);
8372 if (octx->outer_context == NULL)
8373 break;
8374 octx = octx->outer_context;
8376 while (1);
8377 if (octx
8378 && decl
8379 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8380 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8381 omp_notice_variable (octx, decl, true);
8383 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8384 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8385 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8387 notice_outer = false;
8388 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8390 goto do_add;
8392 case OMP_CLAUSE_MAP:
8393 decl = OMP_CLAUSE_DECL (c);
8394 if (error_operand_p (decl))
8395 remove = true;
8396 switch (code)
8398 case OMP_TARGET:
8399 break;
8400 case OACC_DATA:
8401 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8402 break;
8403 /* FALLTHRU */
8404 case OMP_TARGET_DATA:
8405 case OMP_TARGET_ENTER_DATA:
8406 case OMP_TARGET_EXIT_DATA:
8407 case OACC_ENTER_DATA:
8408 case OACC_EXIT_DATA:
8409 case OACC_HOST_DATA:
8410 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8411 || (OMP_CLAUSE_MAP_KIND (c)
8412 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8413 /* For target {,enter ,exit }data only the array slice is
8414 mapped, but not the pointer to it. */
8415 remove = true;
8416 break;
8417 default:
8418 break;
8420 if (remove)
8421 break;
8422 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8424 struct gimplify_omp_ctx *octx;
8425 for (octx = outer_ctx; octx; octx = octx->outer_context)
8427 if (octx->region_type != ORT_ACC_HOST_DATA)
8428 break;
8429 splay_tree_node n2
8430 = splay_tree_lookup (octx->variables,
8431 (splay_tree_key) decl);
8432 if (n2)
8433 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8434 "declared in enclosing %<host_data%> region",
8435 DECL_NAME (decl));
8438 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8439 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8440 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8441 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8442 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8444 remove = true;
8445 break;
8447 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8448 || (OMP_CLAUSE_MAP_KIND (c)
8449 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8450 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8452 OMP_CLAUSE_SIZE (c)
8453 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8454 false);
8455 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8456 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8458 if (!DECL_P (decl))
8460 tree d = decl, *pd;
8461 if (TREE_CODE (d) == ARRAY_REF)
8463 while (TREE_CODE (d) == ARRAY_REF)
8464 d = TREE_OPERAND (d, 0);
8465 if (TREE_CODE (d) == COMPONENT_REF
8466 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8467 decl = d;
8469 pd = &OMP_CLAUSE_DECL (c);
8470 if (d == decl
8471 && TREE_CODE (decl) == INDIRECT_REF
8472 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8473 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8474 == REFERENCE_TYPE))
8476 pd = &TREE_OPERAND (decl, 0);
8477 decl = TREE_OPERAND (decl, 0);
8479 if (TREE_CODE (decl) == COMPONENT_REF)
8481 while (TREE_CODE (decl) == COMPONENT_REF)
8482 decl = TREE_OPERAND (decl, 0);
8483 if (TREE_CODE (decl) == INDIRECT_REF
8484 && DECL_P (TREE_OPERAND (decl, 0))
8485 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8486 == REFERENCE_TYPE))
8487 decl = TREE_OPERAND (decl, 0);
8489 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8490 == GS_ERROR)
8492 remove = true;
8493 break;
8495 if (DECL_P (decl))
8497 if (error_operand_p (decl))
8499 remove = true;
8500 break;
8503 tree stype = TREE_TYPE (decl);
8504 if (TREE_CODE (stype) == REFERENCE_TYPE)
8505 stype = TREE_TYPE (stype);
8506 if (TYPE_SIZE_UNIT (stype) == NULL
8507 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8509 error_at (OMP_CLAUSE_LOCATION (c),
8510 "mapping field %qE of variable length "
8511 "structure", OMP_CLAUSE_DECL (c));
8512 remove = true;
8513 break;
8516 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8518 /* Error recovery. */
8519 if (prev_list_p == NULL)
8521 remove = true;
8522 break;
8524 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8526 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8527 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8529 remove = true;
8530 break;
8535 tree offset;
8536 poly_int64 bitsize, bitpos;
8537 machine_mode mode;
8538 int unsignedp, reversep, volatilep = 0;
8539 tree base = OMP_CLAUSE_DECL (c);
8540 while (TREE_CODE (base) == ARRAY_REF)
8541 base = TREE_OPERAND (base, 0);
8542 if (TREE_CODE (base) == INDIRECT_REF)
8543 base = TREE_OPERAND (base, 0);
8544 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8545 &mode, &unsignedp, &reversep,
8546 &volatilep);
8547 tree orig_base = base;
8548 if ((TREE_CODE (base) == INDIRECT_REF
8549 || (TREE_CODE (base) == MEM_REF
8550 && integer_zerop (TREE_OPERAND (base, 1))))
8551 && DECL_P (TREE_OPERAND (base, 0))
8552 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8553 == REFERENCE_TYPE))
8554 base = TREE_OPERAND (base, 0);
8555 gcc_assert (base == decl
8556 && (offset == NULL_TREE
8557 || poly_int_tree_p (offset)));
8559 splay_tree_node n
8560 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8561 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8562 == GOMP_MAP_ALWAYS_POINTER);
8563 if (n == NULL || (n->value & GOVD_MAP) == 0)
8565 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8566 OMP_CLAUSE_MAP);
8567 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8568 if (orig_base != base)
8569 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8570 else
8571 OMP_CLAUSE_DECL (l) = decl;
8572 OMP_CLAUSE_SIZE (l) = size_int (1);
8573 if (struct_map_to_clause == NULL)
8574 struct_map_to_clause = new hash_map<tree, tree>;
8575 struct_map_to_clause->put (decl, l);
8576 if (ptr)
8578 enum gomp_map_kind mkind
8579 = code == OMP_TARGET_EXIT_DATA
8580 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8581 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8582 OMP_CLAUSE_MAP);
8583 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8584 OMP_CLAUSE_DECL (c2)
8585 = unshare_expr (OMP_CLAUSE_DECL (c));
8586 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8587 OMP_CLAUSE_SIZE (c2)
8588 = TYPE_SIZE_UNIT (ptr_type_node);
8589 OMP_CLAUSE_CHAIN (l) = c2;
8590 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8592 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8593 tree c3
8594 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8595 OMP_CLAUSE_MAP);
8596 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8597 OMP_CLAUSE_DECL (c3)
8598 = unshare_expr (OMP_CLAUSE_DECL (c4));
8599 OMP_CLAUSE_SIZE (c3)
8600 = TYPE_SIZE_UNIT (ptr_type_node);
8601 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8602 OMP_CLAUSE_CHAIN (c2) = c3;
8604 *prev_list_p = l;
8605 prev_list_p = NULL;
8607 else
8609 OMP_CLAUSE_CHAIN (l) = c;
8610 *list_p = l;
8611 list_p = &OMP_CLAUSE_CHAIN (l);
8613 if (orig_base != base && code == OMP_TARGET)
8615 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8616 OMP_CLAUSE_MAP);
8617 enum gomp_map_kind mkind
8618 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8619 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8620 OMP_CLAUSE_DECL (c2) = decl;
8621 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8622 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8623 OMP_CLAUSE_CHAIN (l) = c2;
8625 flags = GOVD_MAP | GOVD_EXPLICIT;
8626 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8627 flags |= GOVD_SEEN;
8628 goto do_add_decl;
8630 else
8632 tree *osc = struct_map_to_clause->get (decl);
8633 tree *sc = NULL, *scp = NULL;
8634 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8635 n->value |= GOVD_SEEN;
8636 poly_offset_int o1, o2;
8637 if (offset)
8638 o1 = wi::to_poly_offset (offset);
8639 else
8640 o1 = 0;
8641 if (maybe_ne (bitpos, 0))
8642 o1 += bits_to_bytes_round_down (bitpos);
8643 sc = &OMP_CLAUSE_CHAIN (*osc);
8644 if (*sc != c
8645 && (OMP_CLAUSE_MAP_KIND (*sc)
8646 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8647 sc = &OMP_CLAUSE_CHAIN (*sc);
8648 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8649 if (ptr && sc == prev_list_p)
8650 break;
8651 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8652 != COMPONENT_REF
8653 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8654 != INDIRECT_REF)
8655 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8656 != ARRAY_REF))
8657 break;
8658 else
8660 tree offset2;
8661 poly_int64 bitsize2, bitpos2;
8662 base = OMP_CLAUSE_DECL (*sc);
8663 if (TREE_CODE (base) == ARRAY_REF)
8665 while (TREE_CODE (base) == ARRAY_REF)
8666 base = TREE_OPERAND (base, 0);
8667 if (TREE_CODE (base) != COMPONENT_REF
8668 || (TREE_CODE (TREE_TYPE (base))
8669 != ARRAY_TYPE))
8670 break;
8672 else if (TREE_CODE (base) == INDIRECT_REF
8673 && (TREE_CODE (TREE_OPERAND (base, 0))
8674 == COMPONENT_REF)
8675 && (TREE_CODE (TREE_TYPE
8676 (TREE_OPERAND (base, 0)))
8677 == REFERENCE_TYPE))
8678 base = TREE_OPERAND (base, 0);
8679 base = get_inner_reference (base, &bitsize2,
8680 &bitpos2, &offset2,
8681 &mode, &unsignedp,
8682 &reversep, &volatilep);
8683 if ((TREE_CODE (base) == INDIRECT_REF
8684 || (TREE_CODE (base) == MEM_REF
8685 && integer_zerop (TREE_OPERAND (base,
8686 1))))
8687 && DECL_P (TREE_OPERAND (base, 0))
8688 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8689 0)))
8690 == REFERENCE_TYPE))
8691 base = TREE_OPERAND (base, 0);
8692 if (base != decl)
8693 break;
8694 if (scp)
8695 continue;
8696 gcc_assert (offset == NULL_TREE
8697 || poly_int_tree_p (offset));
8698 tree d1 = OMP_CLAUSE_DECL (*sc);
8699 tree d2 = OMP_CLAUSE_DECL (c);
8700 while (TREE_CODE (d1) == ARRAY_REF)
8701 d1 = TREE_OPERAND (d1, 0);
8702 while (TREE_CODE (d2) == ARRAY_REF)
8703 d2 = TREE_OPERAND (d2, 0);
8704 if (TREE_CODE (d1) == INDIRECT_REF)
8705 d1 = TREE_OPERAND (d1, 0);
8706 if (TREE_CODE (d2) == INDIRECT_REF)
8707 d2 = TREE_OPERAND (d2, 0);
8708 while (TREE_CODE (d1) == COMPONENT_REF)
8709 if (TREE_CODE (d2) == COMPONENT_REF
8710 && TREE_OPERAND (d1, 1)
8711 == TREE_OPERAND (d2, 1))
8713 d1 = TREE_OPERAND (d1, 0);
8714 d2 = TREE_OPERAND (d2, 0);
8716 else
8717 break;
8718 if (d1 == d2)
8720 error_at (OMP_CLAUSE_LOCATION (c),
8721 "%qE appears more than once in map "
8722 "clauses", OMP_CLAUSE_DECL (c));
8723 remove = true;
8724 break;
8726 if (offset2)
8727 o2 = wi::to_poly_offset (offset2);
8728 else
8729 o2 = 0;
8730 o2 += bits_to_bytes_round_down (bitpos2);
8731 if (maybe_lt (o1, o2)
8732 || (known_eq (o1, o2)
8733 && maybe_lt (bitpos, bitpos2)))
8735 if (ptr)
8736 scp = sc;
8737 else
8738 break;
8741 if (remove)
8742 break;
8743 OMP_CLAUSE_SIZE (*osc)
8744 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8745 size_one_node);
8746 if (ptr)
8748 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8749 OMP_CLAUSE_MAP);
8750 tree cl = NULL_TREE;
8751 enum gomp_map_kind mkind
8752 = code == OMP_TARGET_EXIT_DATA
8753 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8754 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8755 OMP_CLAUSE_DECL (c2)
8756 = unshare_expr (OMP_CLAUSE_DECL (c));
8757 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8758 OMP_CLAUSE_SIZE (c2)
8759 = TYPE_SIZE_UNIT (ptr_type_node);
8760 cl = scp ? *prev_list_p : c2;
8761 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8763 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8764 tree c3
8765 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8766 OMP_CLAUSE_MAP);
8767 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8768 OMP_CLAUSE_DECL (c3)
8769 = unshare_expr (OMP_CLAUSE_DECL (c4));
8770 OMP_CLAUSE_SIZE (c3)
8771 = TYPE_SIZE_UNIT (ptr_type_node);
8772 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8773 if (!scp)
8774 OMP_CLAUSE_CHAIN (c2) = c3;
8775 else
8776 cl = c3;
8778 if (scp)
8779 *scp = c2;
8780 if (sc == prev_list_p)
8782 *sc = cl;
8783 prev_list_p = NULL;
8785 else
8787 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8788 list_p = prev_list_p;
8789 prev_list_p = NULL;
8790 OMP_CLAUSE_CHAIN (c) = *sc;
8791 *sc = cl;
8792 continue;
8795 else if (*sc != c)
8797 *list_p = OMP_CLAUSE_CHAIN (c);
8798 OMP_CLAUSE_CHAIN (c) = *sc;
8799 *sc = c;
8800 continue;
8804 if (!remove
8805 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8806 && OMP_CLAUSE_CHAIN (c)
8807 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8808 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8809 == GOMP_MAP_ALWAYS_POINTER))
8810 prev_list_p = list_p;
8811 break;
8813 flags = GOVD_MAP | GOVD_EXPLICIT;
8814 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8815 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8816 flags |= GOVD_MAP_ALWAYS_TO;
8817 goto do_add;
8819 case OMP_CLAUSE_DEPEND:
8820 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8822 tree deps = OMP_CLAUSE_DECL (c);
8823 while (deps && TREE_CODE (deps) == TREE_LIST)
8825 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8826 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8827 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8828 pre_p, NULL, is_gimple_val, fb_rvalue);
8829 deps = TREE_CHAIN (deps);
8831 break;
8833 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8834 break;
8835 if (handled_depend_iterators == -1)
8836 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8837 if (handled_depend_iterators)
8839 if (handled_depend_iterators == 2)
8840 remove = true;
8841 break;
8843 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8845 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8846 NULL, is_gimple_val, fb_rvalue);
8847 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8849 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8851 remove = true;
8852 break;
8854 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8855 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8856 is_gimple_val, fb_rvalue) == GS_ERROR)
8858 remove = true;
8859 break;
8861 break;
8863 case OMP_CLAUSE_TO:
8864 case OMP_CLAUSE_FROM:
8865 case OMP_CLAUSE__CACHE_:
8866 decl = OMP_CLAUSE_DECL (c);
8867 if (error_operand_p (decl))
8869 remove = true;
8870 break;
8872 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8873 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8874 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8875 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8876 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8878 remove = true;
8879 break;
8881 if (!DECL_P (decl))
8883 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8884 NULL, is_gimple_lvalue, fb_lvalue)
8885 == GS_ERROR)
8887 remove = true;
8888 break;
8890 break;
8892 goto do_notice;
8894 case OMP_CLAUSE_USE_DEVICE_PTR:
8895 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8896 goto do_add;
8897 case OMP_CLAUSE_IS_DEVICE_PTR:
8898 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8899 goto do_add;
8901 do_add:
8902 decl = OMP_CLAUSE_DECL (c);
8903 do_add_decl:
8904 if (error_operand_p (decl))
8906 remove = true;
8907 break;
8909 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8911 tree t = omp_member_access_dummy_var (decl);
8912 if (t)
8914 tree v = DECL_VALUE_EXPR (decl);
8915 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8916 if (outer_ctx)
8917 omp_notice_variable (outer_ctx, t, true);
8920 if (code == OACC_DATA
8921 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8922 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8923 flags |= GOVD_MAP_0LEN_ARRAY;
8924 omp_add_variable (ctx, decl, flags);
8925 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8926 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
8927 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8928 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8930 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8931 GOVD_LOCAL | GOVD_SEEN);
8932 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8933 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8934 find_decl_expr,
8935 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8936 NULL) == NULL_TREE)
8937 omp_add_variable (ctx,
8938 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8939 GOVD_LOCAL | GOVD_SEEN);
8940 gimplify_omp_ctxp = ctx;
8941 push_gimplify_context ();
8943 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8944 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8946 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8947 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8948 pop_gimplify_context
8949 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8950 push_gimplify_context ();
8951 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8952 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8953 pop_gimplify_context
8954 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8955 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8956 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8958 gimplify_omp_ctxp = outer_ctx;
8960 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8961 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8963 gimplify_omp_ctxp = ctx;
8964 push_gimplify_context ();
8965 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8967 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8968 NULL, NULL);
8969 TREE_SIDE_EFFECTS (bind) = 1;
8970 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8971 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8973 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8974 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8975 pop_gimplify_context
8976 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8977 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8979 gimplify_omp_ctxp = outer_ctx;
8981 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8982 && OMP_CLAUSE_LINEAR_STMT (c))
8984 gimplify_omp_ctxp = ctx;
8985 push_gimplify_context ();
8986 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8988 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8989 NULL, NULL);
8990 TREE_SIDE_EFFECTS (bind) = 1;
8991 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8992 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8994 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8995 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8996 pop_gimplify_context
8997 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8998 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9000 gimplify_omp_ctxp = outer_ctx;
9002 if (notice_outer)
9003 goto do_notice;
9004 break;
9006 case OMP_CLAUSE_COPYIN:
9007 case OMP_CLAUSE_COPYPRIVATE:
9008 decl = OMP_CLAUSE_DECL (c);
9009 if (error_operand_p (decl))
9011 remove = true;
9012 break;
9014 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9015 && !remove
9016 && !omp_check_private (ctx, decl, true))
9018 remove = true;
9019 if (is_global_var (decl))
9021 if (DECL_THREAD_LOCAL_P (decl))
9022 remove = false;
9023 else if (DECL_HAS_VALUE_EXPR_P (decl))
9025 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9027 if (value
9028 && DECL_P (value)
9029 && DECL_THREAD_LOCAL_P (value))
9030 remove = false;
9033 if (remove)
9034 error_at (OMP_CLAUSE_LOCATION (c),
9035 "copyprivate variable %qE is not threadprivate"
9036 " or private in outer context", DECL_NAME (decl));
9038 do_notice:
9039 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9040 && outer_ctx
9041 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
9042 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9043 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9044 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE))
9046 splay_tree_node on
9047 = splay_tree_lookup (outer_ctx->variables,
9048 (splay_tree_key)decl);
9049 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9051 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9052 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9053 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9054 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9055 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9056 == POINTER_TYPE))))
9057 omp_firstprivatize_variable (outer_ctx, decl);
9058 else
9059 omp_add_variable (outer_ctx, decl,
9060 GOVD_SEEN | GOVD_SHARED);
9061 omp_notice_variable (outer_ctx, decl, true);
9064 if (outer_ctx)
9065 omp_notice_variable (outer_ctx, decl, true);
9066 if (check_non_private
9067 && region_type == ORT_WORKSHARE
9068 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9069 || decl == OMP_CLAUSE_DECL (c)
9070 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9071 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9072 == ADDR_EXPR
9073 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9074 == POINTER_PLUS_EXPR
9075 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9076 (OMP_CLAUSE_DECL (c), 0), 0))
9077 == ADDR_EXPR)))))
9078 && omp_check_private (ctx, decl, false))
9080 error ("%s variable %qE is private in outer context",
9081 check_non_private, DECL_NAME (decl));
9082 remove = true;
9084 break;
9086 case OMP_CLAUSE_IF:
9087 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9088 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9090 const char *p[2];
9091 for (int i = 0; i < 2; i++)
9092 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9094 case VOID_CST: p[i] = "cancel"; break;
9095 case OMP_PARALLEL: p[i] = "parallel"; break;
9096 case OMP_SIMD: p[i] = "simd"; break;
9097 case OMP_TASK: p[i] = "task"; break;
9098 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9099 case OMP_TARGET_DATA: p[i] = "target data"; break;
9100 case OMP_TARGET: p[i] = "target"; break;
9101 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9102 case OMP_TARGET_ENTER_DATA:
9103 p[i] = "target enter data"; break;
9104 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9105 default: gcc_unreachable ();
9107 error_at (OMP_CLAUSE_LOCATION (c),
9108 "expected %qs %<if%> clause modifier rather than %qs",
9109 p[0], p[1]);
9110 remove = true;
9112 /* Fall through. */
9114 case OMP_CLAUSE_FINAL:
9115 OMP_CLAUSE_OPERAND (c, 0)
9116 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9117 /* Fall through. */
9119 case OMP_CLAUSE_SCHEDULE:
9120 case OMP_CLAUSE_NUM_THREADS:
9121 case OMP_CLAUSE_NUM_TEAMS:
9122 case OMP_CLAUSE_THREAD_LIMIT:
9123 case OMP_CLAUSE_DIST_SCHEDULE:
9124 case OMP_CLAUSE_DEVICE:
9125 case OMP_CLAUSE_PRIORITY:
9126 case OMP_CLAUSE_GRAINSIZE:
9127 case OMP_CLAUSE_NUM_TASKS:
9128 case OMP_CLAUSE_HINT:
9129 case OMP_CLAUSE_ASYNC:
9130 case OMP_CLAUSE_WAIT:
9131 case OMP_CLAUSE_NUM_GANGS:
9132 case OMP_CLAUSE_NUM_WORKERS:
9133 case OMP_CLAUSE_VECTOR_LENGTH:
9134 case OMP_CLAUSE_WORKER:
9135 case OMP_CLAUSE_VECTOR:
9136 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9137 is_gimple_val, fb_rvalue) == GS_ERROR)
9138 remove = true;
9139 break;
9141 case OMP_CLAUSE_GANG:
9142 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9143 is_gimple_val, fb_rvalue) == GS_ERROR)
9144 remove = true;
9145 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9146 is_gimple_val, fb_rvalue) == GS_ERROR)
9147 remove = true;
9148 break;
9150 case OMP_CLAUSE_NOWAIT:
9151 nowait = 1;
9152 break;
9154 case OMP_CLAUSE_ORDERED:
9155 case OMP_CLAUSE_UNTIED:
9156 case OMP_CLAUSE_COLLAPSE:
9157 case OMP_CLAUSE_TILE:
9158 case OMP_CLAUSE_AUTO:
9159 case OMP_CLAUSE_SEQ:
9160 case OMP_CLAUSE_INDEPENDENT:
9161 case OMP_CLAUSE_MERGEABLE:
9162 case OMP_CLAUSE_PROC_BIND:
9163 case OMP_CLAUSE_SAFELEN:
9164 case OMP_CLAUSE_SIMDLEN:
9165 case OMP_CLAUSE_NOGROUP:
9166 case OMP_CLAUSE_THREADS:
9167 case OMP_CLAUSE_SIMD:
9168 case OMP_CLAUSE_IF_PRESENT:
9169 case OMP_CLAUSE_FINALIZE:
9170 break;
9172 case OMP_CLAUSE_DEFAULTMAP:
9173 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9174 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9176 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9177 gdmkmin = GDMK_SCALAR;
9178 gdmkmax = GDMK_POINTER;
9179 break;
9180 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9181 gdmkmin = gdmkmax = GDMK_SCALAR;
9182 break;
9183 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9184 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9185 break;
9186 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9187 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9188 break;
9189 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9190 gdmkmin = gdmkmax = GDMK_POINTER;
9191 break;
9192 default:
9193 gcc_unreachable ();
9195 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9196 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9198 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9199 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9200 break;
9201 case OMP_CLAUSE_DEFAULTMAP_TO:
9202 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9203 break;
9204 case OMP_CLAUSE_DEFAULTMAP_FROM:
9205 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9206 break;
9207 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9208 ctx->defaultmap[gdmk] = GOVD_MAP;
9209 break;
9210 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9211 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9212 break;
9213 case OMP_CLAUSE_DEFAULTMAP_NONE:
9214 ctx->defaultmap[gdmk] = 0;
9215 break;
9216 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9217 switch (gdmk)
9219 case GDMK_SCALAR:
9220 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9221 break;
9222 case GDMK_AGGREGATE:
9223 case GDMK_ALLOCATABLE:
9224 ctx->defaultmap[gdmk] = GOVD_MAP;
9225 break;
9226 case GDMK_POINTER:
9227 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9228 break;
9229 default:
9230 gcc_unreachable ();
9232 break;
9233 default:
9234 gcc_unreachable ();
9236 break;
9238 case OMP_CLAUSE_ALIGNED:
9239 decl = OMP_CLAUSE_DECL (c);
9240 if (error_operand_p (decl))
9242 remove = true;
9243 break;
9245 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9246 is_gimple_val, fb_rvalue) == GS_ERROR)
9248 remove = true;
9249 break;
9251 if (!is_global_var (decl)
9252 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9253 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9254 break;
9256 case OMP_CLAUSE_NONTEMPORAL:
9257 decl = OMP_CLAUSE_DECL (c);
9258 if (error_operand_p (decl))
9260 remove = true;
9261 break;
9263 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9264 break;
9266 case OMP_CLAUSE_DEFAULT:
9267 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9268 break;
9270 default:
9271 gcc_unreachable ();
9274 if (code == OACC_DATA
9275 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9276 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9277 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9278 remove = true;
9279 if (remove)
9280 *list_p = OMP_CLAUSE_CHAIN (c);
9281 else
9282 list_p = &OMP_CLAUSE_CHAIN (c);
9285 gimplify_omp_ctxp = ctx;
9286 if (struct_map_to_clause)
9287 delete struct_map_to_clause;
9290 /* Return true if DECL is a candidate for shared to firstprivate
9291 optimization. We only consider non-addressable scalars, not
9292 too big, and not references. */
9294 static bool
9295 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9297 if (TREE_ADDRESSABLE (decl))
9298 return false;
9299 tree type = TREE_TYPE (decl);
9300 if (!is_gimple_reg_type (type)
9301 || TREE_CODE (type) == REFERENCE_TYPE
9302 || TREE_ADDRESSABLE (type))
9303 return false;
9304 /* Don't optimize too large decls, as each thread/task will have
9305 its own. */
9306 HOST_WIDE_INT len = int_size_in_bytes (type);
9307 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9308 return false;
9309 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9310 return false;
9311 return true;
9314 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9315 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9316 GOVD_WRITTEN in outer contexts. */
9318 static void
9319 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9321 for (; ctx; ctx = ctx->outer_context)
9323 splay_tree_node n = splay_tree_lookup (ctx->variables,
9324 (splay_tree_key) decl);
9325 if (n == NULL)
9326 continue;
9327 else if (n->value & GOVD_SHARED)
9329 n->value |= GOVD_WRITTEN;
9330 return;
9332 else if (n->value & GOVD_DATA_SHARE_CLASS)
9333 return;
9337 /* Helper callback for walk_gimple_seq to discover possible stores
9338 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9339 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9340 for those. */
9342 static tree
9343 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9345 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9347 *walk_subtrees = 0;
9348 if (!wi->is_lhs)
9349 return NULL_TREE;
9351 tree op = *tp;
9354 if (handled_component_p (op))
9355 op = TREE_OPERAND (op, 0);
9356 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9357 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9358 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9359 else
9360 break;
9362 while (1);
9363 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9364 return NULL_TREE;
9366 omp_mark_stores (gimplify_omp_ctxp, op);
9367 return NULL_TREE;
9370 /* Helper callback for walk_gimple_seq to discover possible stores
9371 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9372 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9373 for those. */
9375 static tree
9376 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9377 bool *handled_ops_p,
9378 struct walk_stmt_info *wi)
9380 gimple *stmt = gsi_stmt (*gsi_p);
9381 switch (gimple_code (stmt))
9383 /* Don't recurse on OpenMP constructs for which
9384 gimplify_adjust_omp_clauses already handled the bodies,
9385 except handle gimple_omp_for_pre_body. */
9386 case GIMPLE_OMP_FOR:
9387 *handled_ops_p = true;
9388 if (gimple_omp_for_pre_body (stmt))
9389 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9390 omp_find_stores_stmt, omp_find_stores_op, wi);
9391 break;
9392 case GIMPLE_OMP_PARALLEL:
9393 case GIMPLE_OMP_TASK:
9394 case GIMPLE_OMP_SECTIONS:
9395 case GIMPLE_OMP_SINGLE:
9396 case GIMPLE_OMP_TARGET:
9397 case GIMPLE_OMP_TEAMS:
9398 case GIMPLE_OMP_CRITICAL:
9399 *handled_ops_p = true;
9400 break;
9401 default:
9402 break;
9404 return NULL_TREE;
/* Bundle of state passed through the void *DATA parameter of
   gimplify_adjust_omp_clauses_1.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause chain that new clauses are prepended to.  */
  tree *list_p;
  /* Statement sequence to receive statements emitted while
     gimplifying clause operands.  */
  gimple_seq *pre_p;
};
9413 /* For all variables that were not actually used within the context,
9414 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9416 static int
9417 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9419 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9420 gimple_seq *pre_p
9421 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9422 tree decl = (tree) n->key;
9423 unsigned flags = n->value;
9424 enum omp_clause_code code;
9425 tree clause;
9426 bool private_debug;
9428 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9429 return 0;
9430 if ((flags & GOVD_SEEN) == 0)
9431 return 0;
9432 if (flags & GOVD_DEBUG_PRIVATE)
9434 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9435 private_debug = true;
9437 else if (flags & GOVD_MAP)
9438 private_debug = false;
9439 else
9440 private_debug
9441 = lang_hooks.decls.omp_private_debug_clause (decl,
9442 !!(flags & GOVD_SHARED));
9443 if (private_debug)
9444 code = OMP_CLAUSE_PRIVATE;
9445 else if (flags & GOVD_MAP)
9447 code = OMP_CLAUSE_MAP;
9448 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9449 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9451 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9452 return 0;
9455 else if (flags & GOVD_SHARED)
9457 if (is_global_var (decl))
9459 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9460 while (ctx != NULL)
9462 splay_tree_node on
9463 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9464 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9465 | GOVD_PRIVATE | GOVD_REDUCTION
9466 | GOVD_LINEAR | GOVD_MAP)) != 0)
9467 break;
9468 ctx = ctx->outer_context;
9470 if (ctx == NULL)
9471 return 0;
9473 code = OMP_CLAUSE_SHARED;
9475 else if (flags & GOVD_PRIVATE)
9476 code = OMP_CLAUSE_PRIVATE;
9477 else if (flags & GOVD_FIRSTPRIVATE)
9479 code = OMP_CLAUSE_FIRSTPRIVATE;
9480 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9481 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9482 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9484 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9485 "%<target%> construct", decl);
9486 return 0;
9489 else if (flags & GOVD_LASTPRIVATE)
9490 code = OMP_CLAUSE_LASTPRIVATE;
9491 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9492 return 0;
9493 else
9494 gcc_unreachable ();
9496 if (((flags & GOVD_LASTPRIVATE)
9497 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9498 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9499 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9501 tree chain = *list_p;
9502 clause = build_omp_clause (input_location, code);
9503 OMP_CLAUSE_DECL (clause) = decl;
9504 OMP_CLAUSE_CHAIN (clause) = chain;
9505 if (private_debug)
9506 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9507 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9508 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9509 else if (code == OMP_CLAUSE_SHARED
9510 && (flags & GOVD_WRITTEN) == 0
9511 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9512 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9513 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9514 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9515 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9517 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9518 OMP_CLAUSE_DECL (nc) = decl;
9519 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9520 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9521 OMP_CLAUSE_DECL (clause)
9522 = build_simple_mem_ref_loc (input_location, decl);
9523 OMP_CLAUSE_DECL (clause)
9524 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9525 build_int_cst (build_pointer_type (char_type_node), 0));
9526 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9527 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9528 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9529 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9530 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9531 OMP_CLAUSE_CHAIN (nc) = chain;
9532 OMP_CLAUSE_CHAIN (clause) = nc;
9533 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9534 gimplify_omp_ctxp = ctx->outer_context;
9535 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9536 pre_p, NULL, is_gimple_val, fb_rvalue);
9537 gimplify_omp_ctxp = ctx;
9539 else if (code == OMP_CLAUSE_MAP)
9541 int kind;
9542 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9543 switch (flags & (GOVD_MAP_TO_ONLY
9544 | GOVD_MAP_FORCE
9545 | GOVD_MAP_FORCE_PRESENT
9546 | GOVD_MAP_ALLOC_ONLY
9547 | GOVD_MAP_FROM_ONLY))
9549 case 0:
9550 kind = GOMP_MAP_TOFROM;
9551 break;
9552 case GOVD_MAP_FORCE:
9553 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
9554 break;
9555 case GOVD_MAP_TO_ONLY:
9556 kind = GOMP_MAP_TO;
9557 break;
9558 case GOVD_MAP_FROM_ONLY:
9559 kind = GOMP_MAP_FROM;
9560 break;
9561 case GOVD_MAP_ALLOC_ONLY:
9562 kind = GOMP_MAP_ALLOC;
9563 break;
9564 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
9565 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
9566 break;
9567 case GOVD_MAP_FORCE_PRESENT:
9568 kind = GOMP_MAP_FORCE_PRESENT;
9569 break;
9570 default:
9571 gcc_unreachable ();
9573 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
9574 if (DECL_SIZE (decl)
9575 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9577 tree decl2 = DECL_VALUE_EXPR (decl);
9578 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9579 decl2 = TREE_OPERAND (decl2, 0);
9580 gcc_assert (DECL_P (decl2));
9581 tree mem = build_simple_mem_ref (decl2);
9582 OMP_CLAUSE_DECL (clause) = mem;
9583 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9584 if (gimplify_omp_ctxp->outer_context)
9586 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9587 omp_notice_variable (ctx, decl2, true);
9588 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
9590 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9591 OMP_CLAUSE_MAP);
9592 OMP_CLAUSE_DECL (nc) = decl;
9593 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9594 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
9595 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9596 else
9597 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9598 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9599 OMP_CLAUSE_CHAIN (clause) = nc;
9601 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9602 && lang_hooks.decls.omp_privatize_by_reference (decl))
9604 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
9605 OMP_CLAUSE_SIZE (clause)
9606 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
9607 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9608 gimplify_omp_ctxp = ctx->outer_context;
9609 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
9610 pre_p, NULL, is_gimple_val, fb_rvalue);
9611 gimplify_omp_ctxp = ctx;
9612 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9613 OMP_CLAUSE_MAP);
9614 OMP_CLAUSE_DECL (nc) = decl;
9615 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9616 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
9617 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9618 OMP_CLAUSE_CHAIN (clause) = nc;
9620 else
9621 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
9623 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
9625 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
9626 OMP_CLAUSE_DECL (nc) = decl;
9627 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
9628 OMP_CLAUSE_CHAIN (nc) = chain;
9629 OMP_CLAUSE_CHAIN (clause) = nc;
9630 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9631 gimplify_omp_ctxp = ctx->outer_context;
9632 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9633 gimplify_omp_ctxp = ctx;
9635 *list_p = clause;
9636 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9637 gimplify_omp_ctxp = ctx->outer_context;
9638 lang_hooks.decls.omp_finish_clause (clause, pre_p);
9639 if (gimplify_omp_ctxp)
9640 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
9641 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
9642 && DECL_P (OMP_CLAUSE_SIZE (clause)))
9643 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
9644 true);
9645 gimplify_omp_ctxp = ctx;
9646 return 0;
/* Post-gimplification fixup of an OpenMP/OpenACC clause chain.

   PRE_P is the pre-statement sequence of the construct, BODY the already
   gimplified region body (or NULL), LIST_P points to the head of the
   clause chain, and CODE identifies the construct (e.g. OMP_PARALLEL,
   OMP_DISTRIBUTE, OMP_TARGET_EXIT_DATA).

   Walks the explicit clauses, removing those that turned out to be
   unnecessary (e.g. privatization of variables never seen in the region),
   fixing up map/size information for variable-sized decls, then appends
   clauses for the implicit data sharing recorded in the context, and
   finally pops and deletes the current gimplify_omp_ctx.

   NOTE(review): this listing appears to have lost brace-only and blank
   lines during extraction; the embedded numbers are the original file's
   own line numbers.  */
9649 static void
9650 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
9651 enum tree_code code)
9653 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9654 tree c, decl;
/* If there is a body and some enclosing context is a parallel/task/teams
   region, scan the body for stores; that populates the GOVD_WRITTEN bits
   consulted below to mark shared clauses read-only.  */
9656 if (body)
9658 struct gimplify_omp_ctx *octx;
9659 for (octx = ctx; octx; octx = octx->outer_context)
9660 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9661 break;
9662 if (octx)
9664 struct walk_stmt_info wi;
9665 memset (&wi, 0, sizeof (wi));
9666 walk_gimple_seq (body, omp_find_stores_stmt,
9667 omp_find_stores_op, &wi);
/* Iterate over the explicit clauses; REMOVE is set whenever the current
   clause should be unlinked from the chain.  */
9670 while ((c = *list_p) != NULL)
9672 splay_tree_node n;
9673 bool remove = false;
9675 switch (OMP_CLAUSE_CODE (c))
9677 case OMP_CLAUSE_FIRSTPRIVATE:
/* _Atomic variables may not be firstprivatized on non-OpenACC target
   regions; diagnose and drop the clause.  */
9678 if ((ctx->region_type & ORT_TARGET)
9679 && (ctx->region_type & ORT_ACC) == 0
9680 && TYPE_ATOMIC (strip_array_types
9681 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9683 error_at (OMP_CLAUSE_LOCATION (c),
9684 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9685 "%<target%> construct", OMP_CLAUSE_DECL (c));
9686 remove = true;
9687 break;
9689 /* FALLTHRU */
9690 case OMP_CLAUSE_PRIVATE:
9691 case OMP_CLAUSE_SHARED:
9692 case OMP_CLAUSE_LINEAR:
/* Drop data-sharing clauses for variables never seen in the region;
   otherwise possibly turn them into debug-private clauses, or mark
   read-only shared variables for the firstprivate optimization.  */
9693 decl = OMP_CLAUSE_DECL (c);
9694 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9695 remove = !(n->value & GOVD_SEEN);
9696 if (! remove)
9698 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
9699 if ((n->value & GOVD_DEBUG_PRIVATE)
9700 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9702 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9703 || ((n->value & GOVD_DATA_SHARE_CLASS)
9704 == GOVD_SHARED));
9705 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
9706 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
9708 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9709 && (n->value & GOVD_WRITTEN) == 0
9710 && DECL_P (decl)
9711 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9712 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9713 else if (DECL_P (decl)
9714 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9715 && (n->value & GOVD_WRITTEN) != 0)
9716 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9717 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9718 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9719 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9721 break;
9723 case OMP_CLAUSE_LASTPRIVATE:
9724 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9725 accurately reflect the presence of a FIRSTPRIVATE clause. */
9726 decl = OMP_CLAUSE_DECL (c);
9727 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9728 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9729 = (n->value & GOVD_FIRSTPRIVATE) != 0;
/* firstprivate+lastprivate on the same variable is invalid on
   distribute constructs.  */
9730 if (code == OMP_DISTRIBUTE
9731 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9733 remove = true;
9734 error_at (OMP_CLAUSE_LOCATION (c),
9735 "same variable used in %<firstprivate%> and "
9736 "%<lastprivate%> clauses on %<distribute%> "
9737 "construct");
9739 if (!remove
9740 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9741 && DECL_P (decl)
9742 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9743 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9744 break;
9746 case OMP_CLAUSE_ALIGNED:
9747 decl = OMP_CLAUSE_DECL (c);
9748 if (!is_global_var (decl))
9750 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9751 remove = n == NULL || !(n->value & GOVD_SEEN);
9752 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9754 struct gimplify_omp_ctx *octx;
9755 if (n != NULL
9756 && (n->value & (GOVD_DATA_SHARE_CLASS
9757 & ~GOVD_FIRSTPRIVATE)))
9758 remove = true;
9759 else
9760 for (octx = ctx->outer_context; octx;
9761 octx = octx->outer_context)
9763 n = splay_tree_lookup (octx->variables,
9764 (splay_tree_key) decl);
9765 if (n == NULL)
9766 continue;
9767 if (n->value & GOVD_LOCAL)
9768 break;
9769 /* We have to avoid assigning a shared variable
9770 to itself when trying to add
9771 __builtin_assume_aligned. */
9772 if (n->value & GOVD_SHARED)
9774 remove = true;
9775 break;
9780 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9782 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9783 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9784 remove = true;
9786 break;
9788 case OMP_CLAUSE_NONTEMPORAL:
9789 decl = OMP_CLAUSE_DECL (c);
9790 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9791 remove = n == NULL || !(n->value & GOVD_SEEN);
9792 break;
9794 case OMP_CLAUSE_MAP:
/* ALWAYS_POINTER maps are only meaningful on entry; drop them for
   target exit data.  */
9795 if (code == OMP_TARGET_EXIT_DATA
9796 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9798 remove = true;
9799 break;
9801 decl = OMP_CLAUSE_DECL (c);
9802 /* Data clauses associated with acc parallel reductions must be
9803 compatible with present_or_copy. Warn and adjust the clause
9804 if that is not the case. */
9805 if (ctx->region_type == ORT_ACC_PARALLEL)
9807 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9808 n = NULL;
9810 if (DECL_P (t))
9811 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9813 if (n && (n->value & GOVD_REDUCTION))
9815 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9817 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9818 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9819 && kind != GOMP_MAP_FORCE_PRESENT
9820 && kind != GOMP_MAP_POINTER)
9822 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9823 "incompatible data clause with reduction "
9824 "on %qE; promoting to present_or_copy",
9825 DECL_NAME (t));
9826 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
/* Non-decl map operands (component refs etc.): only prune
   unreferenced firstprivate-pointer maps of reference members.  */
9830 if (!DECL_P (decl))
9832 if ((ctx->region_type & ORT_TARGET) != 0
9833 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9835 if (TREE_CODE (decl) == INDIRECT_REF
9836 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9837 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9838 == REFERENCE_TYPE))
9839 decl = TREE_OPERAND (decl, 0);
9840 if (TREE_CODE (decl) == COMPONENT_REF)
9842 while (TREE_CODE (decl) == COMPONENT_REF)
9843 decl = TREE_OPERAND (decl, 0);
9844 if (DECL_P (decl))
9846 n = splay_tree_lookup (ctx->variables,
9847 (splay_tree_key) decl);
9848 if (!(n->value & GOVD_SEEN))
9849 remove = true;
9853 break;
9855 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9856 if ((ctx->region_type & ORT_TARGET) != 0
9857 && !(n->value & GOVD_SEEN)
9858 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9859 && (!is_global_var (decl)
9860 || !lookup_attribute ("omp declare target link",
9861 DECL_ATTRIBUTES (decl))))
9863 remove = true;
9864 /* For struct element mapping, if struct is never referenced
9865 in target block and none of the mapping has always modifier,
9866 remove all the struct element mappings, which immediately
9867 follow the GOMP_MAP_STRUCT map clause. */
9868 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9870 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9871 while (cnt--)
9872 OMP_CLAUSE_CHAIN (c)
9873 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9876 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9877 && code == OMP_TARGET_EXIT_DATA)
9878 remove = true;
/* Variable-sized decl: rewrite the map to go through the decl's
   DECL_VALUE_EXPR pointer and add a pointer map for the base.  */
9879 else if (DECL_SIZE (decl)
9880 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9881 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9882 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9883 && (OMP_CLAUSE_MAP_KIND (c)
9884 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9886 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9887 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9888 INTEGER_CST. */
9889 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9891 tree decl2 = DECL_VALUE_EXPR (decl);
9892 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9893 decl2 = TREE_OPERAND (decl2, 0);
9894 gcc_assert (DECL_P (decl2));
9895 tree mem = build_simple_mem_ref (decl2);
9896 OMP_CLAUSE_DECL (c) = mem;
9897 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9898 if (ctx->outer_context)
9900 omp_notice_variable (ctx->outer_context, decl2, true);
9901 omp_notice_variable (ctx->outer_context,
9902 OMP_CLAUSE_SIZE (c), true);
9904 if (((ctx->region_type & ORT_TARGET) != 0
9905 || !ctx->target_firstprivatize_array_bases)
9906 && ((n->value & GOVD_SEEN) == 0
9907 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9909 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9910 OMP_CLAUSE_MAP);
9911 OMP_CLAUSE_DECL (nc) = decl;
9912 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9913 if (ctx->target_firstprivatize_array_bases)
9914 OMP_CLAUSE_SET_MAP_KIND (nc,
9915 GOMP_MAP_FIRSTPRIVATE_POINTER);
9916 else
9917 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9918 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9919 OMP_CLAUSE_CHAIN (c) = nc;
9920 c = nc;
9923 else
9925 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9926 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9927 gcc_assert ((n->value & GOVD_SEEN) == 0
9928 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9929 == 0));
9931 break;
9933 case OMP_CLAUSE_TO:
9934 case OMP_CLAUSE_FROM:
9935 case OMP_CLAUSE__CACHE_:
9936 decl = OMP_CLAUSE_DECL (c);
9937 if (!DECL_P (decl))
9938 break;
/* Same variable-sized-decl rewrite as for map clauses above.  */
9939 if (DECL_SIZE (decl)
9940 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9942 tree decl2 = DECL_VALUE_EXPR (decl);
9943 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9944 decl2 = TREE_OPERAND (decl2, 0);
9945 gcc_assert (DECL_P (decl2));
9946 tree mem = build_simple_mem_ref (decl2);
9947 OMP_CLAUSE_DECL (c) = mem;
9948 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9949 if (ctx->outer_context)
9951 omp_notice_variable (ctx->outer_context, decl2, true);
9952 omp_notice_variable (ctx->outer_context,
9953 OMP_CLAUSE_SIZE (c), true);
9956 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9957 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9958 break;
9960 case OMP_CLAUSE_REDUCTION:
9961 case OMP_CLAUSE_IN_REDUCTION:
9962 case OMP_CLAUSE_TASK_REDUCTION:
9963 decl = OMP_CLAUSE_DECL (c);
9964 /* OpenACC reductions need a present_or_copy data clause.
9965 Add one if necessary. Emit error when the reduction is private. */
9966 if (ctx->region_type == ORT_ACC_PARALLEL)
9968 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9969 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9971 remove = true;
9972 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9973 "reduction on %qE", DECL_NAME (decl));
9975 else if ((n->value & GOVD_MAP) == 0)
9977 tree next = OMP_CLAUSE_CHAIN (c);
9978 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9979 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9980 OMP_CLAUSE_DECL (nc) = decl;
9981 OMP_CLAUSE_CHAIN (c) = nc;
9982 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9983 while (1)
9985 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9986 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9987 break;
9988 nc = OMP_CLAUSE_CHAIN (nc);
9990 OMP_CLAUSE_CHAIN (nc) = next;
9991 n->value |= GOVD_MAP;
9994 if (DECL_P (decl)
9995 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9996 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9997 break;
/* These clauses need no adjustment here.  */
9998 case OMP_CLAUSE_COPYIN:
9999 case OMP_CLAUSE_COPYPRIVATE:
10000 case OMP_CLAUSE_IF:
10001 case OMP_CLAUSE_NUM_THREADS:
10002 case OMP_CLAUSE_NUM_TEAMS:
10003 case OMP_CLAUSE_THREAD_LIMIT:
10004 case OMP_CLAUSE_DIST_SCHEDULE:
10005 case OMP_CLAUSE_DEVICE:
10006 case OMP_CLAUSE_SCHEDULE:
10007 case OMP_CLAUSE_NOWAIT:
10008 case OMP_CLAUSE_ORDERED:
10009 case OMP_CLAUSE_DEFAULT:
10010 case OMP_CLAUSE_UNTIED:
10011 case OMP_CLAUSE_COLLAPSE:
10012 case OMP_CLAUSE_FINAL:
10013 case OMP_CLAUSE_MERGEABLE:
10014 case OMP_CLAUSE_PROC_BIND:
10015 case OMP_CLAUSE_SAFELEN:
10016 case OMP_CLAUSE_SIMDLEN:
10017 case OMP_CLAUSE_DEPEND:
10018 case OMP_CLAUSE_PRIORITY:
10019 case OMP_CLAUSE_GRAINSIZE:
10020 case OMP_CLAUSE_NUM_TASKS:
10021 case OMP_CLAUSE_NOGROUP:
10022 case OMP_CLAUSE_THREADS:
10023 case OMP_CLAUSE_SIMD:
10024 case OMP_CLAUSE_HINT:
10025 case OMP_CLAUSE_DEFAULTMAP:
10026 case OMP_CLAUSE_USE_DEVICE_PTR:
10027 case OMP_CLAUSE_IS_DEVICE_PTR:
10028 case OMP_CLAUSE_ASYNC:
10029 case OMP_CLAUSE_WAIT:
10030 case OMP_CLAUSE_INDEPENDENT:
10031 case OMP_CLAUSE_NUM_GANGS:
10032 case OMP_CLAUSE_NUM_WORKERS:
10033 case OMP_CLAUSE_VECTOR_LENGTH:
10034 case OMP_CLAUSE_GANG:
10035 case OMP_CLAUSE_WORKER:
10036 case OMP_CLAUSE_VECTOR:
10037 case OMP_CLAUSE_AUTO:
10038 case OMP_CLAUSE_SEQ:
10039 case OMP_CLAUSE_TILE:
10040 case OMP_CLAUSE_IF_PRESENT:
10041 case OMP_CLAUSE_FINALIZE:
10042 break;
10044 default:
10045 gcc_unreachable ();
/* Either unlink the clause or step past it.  */
10048 if (remove)
10049 *list_p = OMP_CLAUSE_CHAIN (c);
10050 else
10051 list_p = &OMP_CLAUSE_CHAIN (c);
10054 /* Add in any implicit data sharing. */
10055 struct gimplify_adjust_omp_clauses_data data;
10056 data.list_p = list_p;
10057 data.pre_p = pre_p;
10058 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
/* This context is finished; restore the enclosing one.  */
10060 gimplify_omp_ctxp = ctx->outer_context;
10061 delete_omp_context (ctx);
10064 /* Gimplify OACC_CACHE. */
10066 static void
10067 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10069 tree expr = *expr_p;
10071 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10072 OACC_CACHE);
10073 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10074 OACC_CACHE);
10076 /* TODO: Do something sensible with this information. */
10078 *expr_p = NULL_TREE;
10081 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10082 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10083 kind. The entry kind will replace the one in CLAUSE, while the exit
10084 kind will be used in a new omp_clause and returned to the caller. */
10086 static tree
10087 gimplify_oacc_declare_1 (tree clause)
10089 HOST_WIDE_INT kind, new_op;
10090 bool ret = false;
10091 tree c = NULL;
10093 kind = OMP_CLAUSE_MAP_KIND (clause);
10095 switch (kind)
10097 case GOMP_MAP_ALLOC:
10098 new_op = GOMP_MAP_RELEASE;
10099 ret = true;
10100 break;
10102 case GOMP_MAP_FROM:
10103 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10104 new_op = GOMP_MAP_FROM;
10105 ret = true;
10106 break;
10108 case GOMP_MAP_TOFROM:
10109 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10110 new_op = GOMP_MAP_FROM;
10111 ret = true;
10112 break;
10114 case GOMP_MAP_DEVICE_RESIDENT:
10115 case GOMP_MAP_FORCE_DEVICEPTR:
10116 case GOMP_MAP_FORCE_PRESENT:
10117 case GOMP_MAP_LINK:
10118 case GOMP_MAP_POINTER:
10119 case GOMP_MAP_TO:
10120 break;
10122 default:
10123 gcc_unreachable ();
10124 break;
10127 if (ret)
10129 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10130 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10131 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10134 return c;
10137 /* Gimplify OACC_DECLARE. */
10139 static void
10140 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10142 tree expr = *expr_p;
10143 gomp_target *stmt;
10144 tree clauses, t, decl;
10146 clauses = OACC_DECLARE_CLAUSES (expr);
10148 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10149 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10151 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10153 decl = OMP_CLAUSE_DECL (t);
10155 if (TREE_CODE (decl) == MEM_REF)
10156 decl = TREE_OPERAND (decl, 0);
10158 if (VAR_P (decl) && !is_oacc_declared (decl))
10160 tree attr = get_identifier ("oacc declare target");
10161 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10162 DECL_ATTRIBUTES (decl));
10165 if (VAR_P (decl)
10166 && !is_global_var (decl)
10167 && DECL_CONTEXT (decl) == current_function_decl)
10169 tree c = gimplify_oacc_declare_1 (t);
10170 if (c)
10172 if (oacc_declare_returns == NULL)
10173 oacc_declare_returns = new hash_map<tree, tree>;
10175 oacc_declare_returns->put (decl, c);
10179 if (gimplify_omp_ctxp)
10180 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10183 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10184 clauses);
10186 gimplify_seq_add_stmt (pre_p, stmt);
10188 *expr_p = NULL_TREE;
10191 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10192 gimplification of the body, as well as scanning the body for used
10193 variables. We need to do this scan now, because variable-sized
10194 decls will be decomposed during gimplification. */
10196 static void
10197 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10199 tree expr = *expr_p;
10200 gimple *g;
10201 gimple_seq body = NULL;
10203 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10204 OMP_PARALLEL_COMBINED (expr)
10205 ? ORT_COMBINED_PARALLEL
10206 : ORT_PARALLEL, OMP_PARALLEL);
10208 push_gimplify_context ();
10210 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10211 if (gimple_code (g) == GIMPLE_BIND)
10212 pop_gimplify_context (g);
10213 else
10214 pop_gimplify_context (NULL);
10216 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10217 OMP_PARALLEL);
10219 g = gimple_build_omp_parallel (body,
10220 OMP_PARALLEL_CLAUSES (expr),
10221 NULL_TREE, NULL_TREE);
10222 if (OMP_PARALLEL_COMBINED (expr))
10223 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10224 gimplify_seq_add_stmt (pre_p, g);
10225 *expr_p = NULL_TREE;
10228 /* Gimplify the contents of an OMP_TASK statement. This involves
10229 gimplification of the body, as well as scanning the body for used
10230 variables. We need to do this scan now, because variable-sized
10231 decls will be decomposed during gimplification. */
10233 static void
10234 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10236 tree expr = *expr_p;
10237 gimple *g;
10238 gimple_seq body = NULL;
10240 if (OMP_TASK_BODY (expr) == NULL_TREE)
10241 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10242 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10243 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10245 error_at (OMP_CLAUSE_LOCATION (c),
10246 "%<mutexinoutset%> kind in %<depend%> clause on a "
10247 "%<taskwait%> construct");
10248 break;
10251 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10252 omp_find_clause (OMP_TASK_CLAUSES (expr),
10253 OMP_CLAUSE_UNTIED)
10254 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10256 if (OMP_TASK_BODY (expr))
10258 push_gimplify_context ();
10260 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10261 if (gimple_code (g) == GIMPLE_BIND)
10262 pop_gimplify_context (g);
10263 else
10264 pop_gimplify_context (NULL);
10267 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10268 OMP_TASK);
10270 g = gimple_build_omp_task (body,
10271 OMP_TASK_CLAUSES (expr),
10272 NULL_TREE, NULL_TREE,
10273 NULL_TREE, NULL_TREE, NULL_TREE);
10274 if (OMP_TASK_BODY (expr) == NULL_TREE)
10275 gimple_omp_task_set_taskwait_p (g, true);
10276 gimplify_seq_add_stmt (pre_p, g);
10277 *expr_p = NULL_TREE;
10280 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
10281 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10282 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10283 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10284 OMP_FOR in between if any and pdata[3] is address of the inner
10285 OMP_FOR/OMP_SIMD. */
10287 static tree
10288 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10290 tree **pdata = (tree **) data;
10291 *walk_subtrees = 0;
10292 switch (TREE_CODE (*tp))
10294 case OMP_FOR:
10295 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10297 pdata[3] = tp;
10298 return *tp;
10300 pdata[2] = tp;
10301 *walk_subtrees = 1;
10302 break;
10303 case OMP_SIMD:
10304 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10306 pdata[3] = tp;
10307 return *tp;
10309 break;
10310 case BIND_EXPR:
10311 if (BIND_EXPR_VARS (*tp)
10312 || (BIND_EXPR_BLOCK (*tp)
10313 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10314 pdata[0] = tp;
10315 *walk_subtrees = 1;
10316 break;
10317 case STATEMENT_LIST:
10318 if (!tsi_one_before_end_p (tsi_start (*tp)))
10319 pdata[0] = tp;
10320 *walk_subtrees = 1;
10321 break;
10322 case TRY_FINALLY_EXPR:
10323 pdata[0] = tp;
10324 *walk_subtrees = 1;
10325 break;
10326 case OMP_PARALLEL:
10327 pdata[1] = tp;
10328 *walk_subtrees = 1;
10329 break;
10330 default:
10331 break;
10333 return NULL_TREE;
10336 /* Gimplify the gross structure of an OMP_FOR statement. */
10338 static enum gimplify_status
10339 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10341 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10342 enum gimplify_status ret = GS_ALL_DONE;
10343 enum gimplify_status tret;
10344 gomp_for *gfor;
10345 gimple_seq for_body, for_pre_body;
10346 int i;
10347 bitmap has_decl_expr = NULL;
10348 enum omp_region_type ort = ORT_WORKSHARE;
10350 orig_for_stmt = for_stmt = *expr_p;
10352 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10354 tree *data[4] = { NULL, NULL, NULL, NULL };
10355 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10356 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10357 find_combined_omp_for, data, NULL);
10358 if (inner_for_stmt == NULL_TREE)
10360 gcc_assert (seen_error ());
10361 *expr_p = NULL_TREE;
10362 return GS_ERROR;
10364 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10366 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10367 &OMP_FOR_PRE_BODY (for_stmt));
10368 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10370 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10372 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10373 &OMP_FOR_PRE_BODY (for_stmt));
10374 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10377 if (data[0])
10379 /* We have some statements or variable declarations in between
10380 the composite construct directives. Move them around the
10381 inner_for_stmt. */
10382 data[0] = expr_p;
10383 for (i = 0; i < 3; i++)
10384 if (data[i])
10386 tree t = *data[i];
10387 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10388 data[i + 1] = data[i];
10389 *data[i] = OMP_BODY (t);
10390 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10391 NULL_TREE, make_node (BLOCK));
10392 OMP_BODY (t) = body;
10393 append_to_statement_list_force (inner_for_stmt,
10394 &BIND_EXPR_BODY (body));
10395 *data[3] = t;
10396 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10397 gcc_assert (*data[3] == inner_for_stmt);
10399 return GS_OK;
10402 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10403 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10404 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10405 i)) == TREE_LIST
10406 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10407 i)))
10409 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10410 /* Class iterators aren't allowed on OMP_SIMD, so the only
10411 case we need to solve is distribute parallel for. */
10412 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10413 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10414 && data[1]);
10415 tree orig_decl = TREE_PURPOSE (orig);
10416 tree last = TREE_VALUE (orig);
10417 tree *pc;
10418 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10419 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10420 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10421 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10422 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10423 break;
10424 if (*pc == NULL_TREE)
10426 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10428 /* private clause will appear only on inner_for_stmt.
10429 Change it into firstprivate, and add private clause
10430 on for_stmt. */
10431 tree c = copy_node (*pc);
10432 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10433 OMP_FOR_CLAUSES (for_stmt) = c;
10434 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10435 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10437 else
10439 /* lastprivate clause will appear on both inner_for_stmt
10440 and for_stmt. Add firstprivate clause to
10441 inner_for_stmt. */
10442 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10443 OMP_CLAUSE_FIRSTPRIVATE);
10444 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10445 OMP_CLAUSE_CHAIN (c) = *pc;
10446 *pc = c;
10447 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10449 tree c = build_omp_clause (UNKNOWN_LOCATION,
10450 OMP_CLAUSE_FIRSTPRIVATE);
10451 OMP_CLAUSE_DECL (c) = last;
10452 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10453 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10454 c = build_omp_clause (UNKNOWN_LOCATION,
10455 *pc ? OMP_CLAUSE_SHARED
10456 : OMP_CLAUSE_FIRSTPRIVATE);
10457 OMP_CLAUSE_DECL (c) = orig_decl;
10458 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10459 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10461 /* Similarly, take care of C++ range for temporaries, those should
10462 be firstprivate on OMP_PARALLEL if any. */
10463 if (data[1])
10464 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10465 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10466 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10467 i)) == TREE_LIST
10468 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10469 i)))
10471 tree orig
10472 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10473 tree v = TREE_CHAIN (orig);
10474 tree c = build_omp_clause (UNKNOWN_LOCATION,
10475 OMP_CLAUSE_FIRSTPRIVATE);
10476 /* First add firstprivate clause for the __for_end artificial
10477 decl. */
10478 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10479 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10480 == REFERENCE_TYPE)
10481 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10482 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10483 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10484 if (TREE_VEC_ELT (v, 0))
10486 /* And now the same for __for_range artificial decl if it
10487 exists. */
10488 c = build_omp_clause (UNKNOWN_LOCATION,
10489 OMP_CLAUSE_FIRSTPRIVATE);
10490 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10491 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10492 == REFERENCE_TYPE)
10493 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10494 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10495 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10500 switch (TREE_CODE (for_stmt))
10502 case OMP_FOR:
10503 case OMP_DISTRIBUTE:
10504 break;
10505 case OACC_LOOP:
10506 ort = ORT_ACC;
10507 break;
10508 case OMP_TASKLOOP:
10509 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
10510 ort = ORT_UNTIED_TASKLOOP;
10511 else
10512 ort = ORT_TASKLOOP;
10513 break;
10514 case OMP_SIMD:
10515 ort = ORT_SIMD;
10516 break;
10517 default:
10518 gcc_unreachable ();
10521 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10522 clause for the IV. */
10523 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10525 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10526 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10527 decl = TREE_OPERAND (t, 0);
10528 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10529 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10530 && OMP_CLAUSE_DECL (c) == decl)
10532 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10533 break;
10537 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10538 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10539 TREE_CODE (for_stmt));
10541 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10542 gimplify_omp_ctxp->distribute = true;
10544 /* Handle OMP_FOR_INIT. */
10545 for_pre_body = NULL;
10546 if ((ort == ORT_SIMD
10547 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10548 && OMP_FOR_PRE_BODY (for_stmt))
10550 has_decl_expr = BITMAP_ALLOC (NULL);
10551 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10552 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
10553 == VAR_DECL)
10555 t = OMP_FOR_PRE_BODY (for_stmt);
10556 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10558 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10560 tree_stmt_iterator si;
10561 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10562 tsi_next (&si))
10564 t = tsi_stmt (si);
10565 if (TREE_CODE (t) == DECL_EXPR
10566 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10567 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10571 if (OMP_FOR_PRE_BODY (for_stmt))
10573 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10574 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10575 else
10577 struct gimplify_omp_ctx ctx;
10578 memset (&ctx, 0, sizeof (ctx));
10579 ctx.region_type = ORT_NONE;
10580 gimplify_omp_ctxp = &ctx;
10581 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10582 gimplify_omp_ctxp = NULL;
10585 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
10587 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10588 for_stmt = inner_for_stmt;
10590 /* For taskloop, need to gimplify the start, end and step before the
10591 taskloop, outside of the taskloop omp context. */
10592 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10594 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10596 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10597 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10599 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10600 TREE_OPERAND (t, 1)
10601 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10602 gimple_seq_empty_p (for_pre_body)
10603 ? pre_p : &for_pre_body, NULL,
10604 false);
10605 /* Reference to pointer conversion is considered useless,
10606 but is significant for firstprivate clause. Force it
10607 here. */
10608 if (TREE_CODE (type) == POINTER_TYPE
10609 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10610 == REFERENCE_TYPE))
10612 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10613 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10614 TREE_OPERAND (t, 1));
10615 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10616 ? pre_p : &for_pre_body);
10617 TREE_OPERAND (t, 1) = v;
10619 tree c = build_omp_clause (input_location,
10620 OMP_CLAUSE_FIRSTPRIVATE);
10621 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10622 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10623 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10626 /* Handle OMP_FOR_COND. */
10627 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10628 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10630 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10631 TREE_OPERAND (t, 1)
10632 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10633 gimple_seq_empty_p (for_pre_body)
10634 ? pre_p : &for_pre_body, NULL,
10635 false);
10636 /* Reference to pointer conversion is considered useless,
10637 but is significant for firstprivate clause. Force it
10638 here. */
10639 if (TREE_CODE (type) == POINTER_TYPE
10640 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10641 == REFERENCE_TYPE))
10643 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10644 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10645 TREE_OPERAND (t, 1));
10646 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10647 ? pre_p : &for_pre_body);
10648 TREE_OPERAND (t, 1) = v;
10650 tree c = build_omp_clause (input_location,
10651 OMP_CLAUSE_FIRSTPRIVATE);
10652 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10653 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10654 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10657 /* Handle OMP_FOR_INCR. */
10658 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10659 if (TREE_CODE (t) == MODIFY_EXPR)
10661 decl = TREE_OPERAND (t, 0);
10662 t = TREE_OPERAND (t, 1);
10663 tree *tp = &TREE_OPERAND (t, 1);
10664 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10665 tp = &TREE_OPERAND (t, 0);
10667 if (!is_gimple_constant (*tp))
10669 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10670 ? pre_p : &for_pre_body;
10671 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
10672 tree c = build_omp_clause (input_location,
10673 OMP_CLAUSE_FIRSTPRIVATE);
10674 OMP_CLAUSE_DECL (c) = *tp;
10675 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10676 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10681 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10682 OMP_TASKLOOP);
10685 if (orig_for_stmt != for_stmt)
10686 gimplify_omp_ctxp->combined_loop = true;
10688 for_body = NULL;
10689 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10690 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10691 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10692 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
10694 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
10695 bool is_doacross = false;
10696 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10698 is_doacross = true;
10699 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10700 (OMP_FOR_INIT (for_stmt))
10701 * 2);
10703 int collapse = 1, tile = 0;
10704 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
10705 if (c)
10706 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10707 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10708 if (c)
10709 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
10710 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10712 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10713 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10714 decl = TREE_OPERAND (t, 0);
10715 gcc_assert (DECL_P (decl));
10716 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10717 || POINTER_TYPE_P (TREE_TYPE (decl)));
10718 if (is_doacross)
10720 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
10722 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
10723 if (TREE_CODE (orig_decl) == TREE_LIST)
10725 orig_decl = TREE_PURPOSE (orig_decl);
10726 if (!orig_decl)
10727 orig_decl = decl;
10729 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
10731 else
10732 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10733 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10736 /* Make sure the iteration variable is private. */
10737 tree c = NULL_TREE;
10738 tree c2 = NULL_TREE;
10739 if (orig_for_stmt != for_stmt)
10741 /* Preserve this information until we gimplify the inner simd. */
10742 if (has_decl_expr
10743 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10744 TREE_PRIVATE (t) = 1;
10746 else if (ort == ORT_SIMD)
10748 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10749 (splay_tree_key) decl);
10750 omp_is_private (gimplify_omp_ctxp, decl,
10751 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10752 != 1));
10753 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10754 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10755 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10757 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10758 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10759 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
10760 if ((has_decl_expr
10761 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10762 || TREE_PRIVATE (t))
10764 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10765 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10767 struct gimplify_omp_ctx *outer
10768 = gimplify_omp_ctxp->outer_context;
10769 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10771 if (outer->region_type == ORT_WORKSHARE
10772 && outer->combined_loop)
10774 n = splay_tree_lookup (outer->variables,
10775 (splay_tree_key)decl);
10776 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10778 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10779 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10781 else
10783 struct gimplify_omp_ctx *octx = outer->outer_context;
10784 if (octx
10785 && octx->region_type == ORT_COMBINED_PARALLEL
10786 && octx->outer_context
10787 && (octx->outer_context->region_type
10788 == ORT_WORKSHARE)
10789 && octx->outer_context->combined_loop)
10791 octx = octx->outer_context;
10792 n = splay_tree_lookup (octx->variables,
10793 (splay_tree_key)decl);
10794 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10796 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10797 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10804 OMP_CLAUSE_DECL (c) = decl;
10805 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10806 OMP_FOR_CLAUSES (for_stmt) = c;
10807 omp_add_variable (gimplify_omp_ctxp, decl, flags);
10808 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10810 if (outer->region_type == ORT_WORKSHARE
10811 && outer->combined_loop)
10813 if (outer->outer_context
10814 && (outer->outer_context->region_type
10815 == ORT_COMBINED_PARALLEL))
10816 outer = outer->outer_context;
10817 else if (omp_check_private (outer, decl, false))
10818 outer = NULL;
10820 else if (((outer->region_type & ORT_TASKLOOP)
10821 == ORT_TASKLOOP)
10822 && outer->combined_loop
10823 && !omp_check_private (gimplify_omp_ctxp,
10824 decl, false))
10826 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10828 omp_notice_variable (outer, decl, true);
10829 outer = NULL;
10831 if (outer)
10833 n = splay_tree_lookup (outer->variables,
10834 (splay_tree_key)decl);
10835 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10837 omp_add_variable (outer, decl,
10838 GOVD_LASTPRIVATE | GOVD_SEEN);
10839 if (outer->region_type == ORT_COMBINED_PARALLEL
10840 && outer->outer_context
10841 && (outer->outer_context->region_type
10842 == ORT_WORKSHARE)
10843 && outer->outer_context->combined_loop)
10845 outer = outer->outer_context;
10846 n = splay_tree_lookup (outer->variables,
10847 (splay_tree_key)decl);
10848 if (omp_check_private (outer, decl, false))
10849 outer = NULL;
10850 else if (n == NULL
10851 || ((n->value & GOVD_DATA_SHARE_CLASS)
10852 == 0))
10853 omp_add_variable (outer, decl,
10854 GOVD_LASTPRIVATE
10855 | GOVD_SEEN);
10856 else
10857 outer = NULL;
10859 if (outer && outer->outer_context
10860 && ((outer->outer_context->region_type
10861 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10862 || (((outer->region_type & ORT_TASKLOOP)
10863 == ORT_TASKLOOP)
10864 && (outer->outer_context->region_type
10865 == ORT_COMBINED_PARALLEL))))
10867 outer = outer->outer_context;
10868 n = splay_tree_lookup (outer->variables,
10869 (splay_tree_key)decl);
10870 if (n == NULL
10871 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10872 omp_add_variable (outer, decl,
10873 GOVD_SHARED | GOVD_SEEN);
10874 else
10875 outer = NULL;
10877 if (outer && outer->outer_context)
10878 omp_notice_variable (outer->outer_context, decl,
10879 true);
10884 else
10886 bool lastprivate
10887 = (!has_decl_expr
10888 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
10889 if (TREE_PRIVATE (t))
10890 lastprivate = false;
10891 struct gimplify_omp_ctx *outer
10892 = gimplify_omp_ctxp->outer_context;
10893 if (outer && lastprivate)
10895 if (outer->region_type == ORT_WORKSHARE
10896 && outer->combined_loop)
10898 n = splay_tree_lookup (outer->variables,
10899 (splay_tree_key)decl);
10900 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10902 lastprivate = false;
10903 outer = NULL;
10905 else if (outer->outer_context
10906 && (outer->outer_context->region_type
10907 == ORT_COMBINED_PARALLEL))
10908 outer = outer->outer_context;
10909 else if (omp_check_private (outer, decl, false))
10910 outer = NULL;
10912 else if (((outer->region_type & ORT_TASKLOOP)
10913 == ORT_TASKLOOP)
10914 && outer->combined_loop
10915 && !omp_check_private (gimplify_omp_ctxp,
10916 decl, false))
10918 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10920 omp_notice_variable (outer, decl, true);
10921 outer = NULL;
10923 if (outer)
10925 n = splay_tree_lookup (outer->variables,
10926 (splay_tree_key)decl);
10927 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10929 omp_add_variable (outer, decl,
10930 GOVD_LASTPRIVATE | GOVD_SEEN);
10931 if (outer->region_type == ORT_COMBINED_PARALLEL
10932 && outer->outer_context
10933 && (outer->outer_context->region_type
10934 == ORT_WORKSHARE)
10935 && outer->outer_context->combined_loop)
10937 outer = outer->outer_context;
10938 n = splay_tree_lookup (outer->variables,
10939 (splay_tree_key)decl);
10940 if (omp_check_private (outer, decl, false))
10941 outer = NULL;
10942 else if (n == NULL
10943 || ((n->value & GOVD_DATA_SHARE_CLASS)
10944 == 0))
10945 omp_add_variable (outer, decl,
10946 GOVD_LASTPRIVATE
10947 | GOVD_SEEN);
10948 else
10949 outer = NULL;
10951 if (outer && outer->outer_context
10952 && ((outer->outer_context->region_type
10953 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10954 || (((outer->region_type & ORT_TASKLOOP)
10955 == ORT_TASKLOOP)
10956 && (outer->outer_context->region_type
10957 == ORT_COMBINED_PARALLEL))))
10959 outer = outer->outer_context;
10960 n = splay_tree_lookup (outer->variables,
10961 (splay_tree_key)decl);
10962 if (n == NULL
10963 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10964 omp_add_variable (outer, decl,
10965 GOVD_SHARED | GOVD_SEEN);
10966 else
10967 outer = NULL;
10969 if (outer && outer->outer_context)
10970 omp_notice_variable (outer->outer_context, decl,
10971 true);
10976 c = build_omp_clause (input_location,
10977 lastprivate ? OMP_CLAUSE_LASTPRIVATE
10978 : OMP_CLAUSE_PRIVATE);
10979 OMP_CLAUSE_DECL (c) = decl;
10980 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10981 OMP_FOR_CLAUSES (for_stmt) = c;
10982 omp_add_variable (gimplify_omp_ctxp, decl,
10983 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
10984 | GOVD_EXPLICIT | GOVD_SEEN);
10985 c = NULL_TREE;
10988 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
10989 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10990 else
10991 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
10993 /* If DECL is not a gimple register, create a temporary variable to act
10994 as an iteration counter. This is valid, since DECL cannot be
10995 modified in the body of the loop. Similarly for any iteration vars
10996 in simd with collapse > 1 where the iterator vars must be
10997 lastprivate. */
10998 if (orig_for_stmt != for_stmt)
10999 var = decl;
11000 else if (!is_gimple_reg (decl)
11001 || (ort == ORT_SIMD
11002 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11004 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11005 /* Make sure omp_add_variable is not called on it prematurely.
11006 We call it ourselves a few lines later. */
11007 gimplify_omp_ctxp = NULL;
11008 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11009 gimplify_omp_ctxp = ctx;
11010 TREE_OPERAND (t, 0) = var;
11012 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11014 if (ort == ORT_SIMD
11015 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11017 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11018 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11019 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11020 OMP_CLAUSE_DECL (c2) = var;
11021 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11022 OMP_FOR_CLAUSES (for_stmt) = c2;
11023 omp_add_variable (gimplify_omp_ctxp, var,
11024 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11025 if (c == NULL_TREE)
11027 c = c2;
11028 c2 = NULL_TREE;
11031 else
11032 omp_add_variable (gimplify_omp_ctxp, var,
11033 GOVD_PRIVATE | GOVD_SEEN);
11035 else
11036 var = decl;
11038 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11039 is_gimple_val, fb_rvalue, false);
11040 ret = MIN (ret, tret);
11041 if (ret == GS_ERROR)
11042 return ret;
11044 /* Handle OMP_FOR_COND. */
11045 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11046 gcc_assert (COMPARISON_CLASS_P (t));
11047 gcc_assert (TREE_OPERAND (t, 0) == decl);
11049 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11050 is_gimple_val, fb_rvalue, false);
11051 ret = MIN (ret, tret);
11053 /* Handle OMP_FOR_INCR. */
11054 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11055 switch (TREE_CODE (t))
11057 case PREINCREMENT_EXPR:
11058 case POSTINCREMENT_EXPR:
11060 tree decl = TREE_OPERAND (t, 0);
11061 /* c_omp_for_incr_canonicalize_ptr() should have been
11062 called to massage things appropriately. */
11063 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11065 if (orig_for_stmt != for_stmt)
11066 break;
11067 t = build_int_cst (TREE_TYPE (decl), 1);
11068 if (c)
11069 OMP_CLAUSE_LINEAR_STEP (c) = t;
11070 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11071 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11072 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11073 break;
11076 case PREDECREMENT_EXPR:
11077 case POSTDECREMENT_EXPR:
11078 /* c_omp_for_incr_canonicalize_ptr() should have been
11079 called to massage things appropriately. */
11080 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11081 if (orig_for_stmt != for_stmt)
11082 break;
11083 t = build_int_cst (TREE_TYPE (decl), -1);
11084 if (c)
11085 OMP_CLAUSE_LINEAR_STEP (c) = t;
11086 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11087 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11088 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11089 break;
11091 case MODIFY_EXPR:
11092 gcc_assert (TREE_OPERAND (t, 0) == decl);
11093 TREE_OPERAND (t, 0) = var;
11095 t = TREE_OPERAND (t, 1);
11096 switch (TREE_CODE (t))
11098 case PLUS_EXPR:
11099 if (TREE_OPERAND (t, 1) == decl)
11101 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11102 TREE_OPERAND (t, 0) = var;
11103 break;
11106 /* Fallthru. */
11107 case MINUS_EXPR:
11108 case POINTER_PLUS_EXPR:
11109 gcc_assert (TREE_OPERAND (t, 0) == decl);
11110 TREE_OPERAND (t, 0) = var;
11111 break;
11112 default:
11113 gcc_unreachable ();
11116 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11117 is_gimple_val, fb_rvalue, false);
11118 ret = MIN (ret, tret);
11119 if (c)
11121 tree step = TREE_OPERAND (t, 1);
11122 tree stept = TREE_TYPE (decl);
11123 if (POINTER_TYPE_P (stept))
11124 stept = sizetype;
11125 step = fold_convert (stept, step);
11126 if (TREE_CODE (t) == MINUS_EXPR)
11127 step = fold_build1 (NEGATE_EXPR, stept, step);
11128 OMP_CLAUSE_LINEAR_STEP (c) = step;
11129 if (step != TREE_OPERAND (t, 1))
11131 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11132 &for_pre_body, NULL,
11133 is_gimple_val, fb_rvalue, false);
11134 ret = MIN (ret, tret);
11137 break;
11139 default:
11140 gcc_unreachable ();
11143 if (c2)
11145 gcc_assert (c);
11146 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11149 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11151 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11152 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11153 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11154 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11155 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11156 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11157 && OMP_CLAUSE_DECL (c) == decl)
11159 if (is_doacross && (collapse == 1 || i >= collapse))
11160 t = var;
11161 else
11163 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11164 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11165 gcc_assert (TREE_OPERAND (t, 0) == var);
11166 t = TREE_OPERAND (t, 1);
11167 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11168 || TREE_CODE (t) == MINUS_EXPR
11169 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11170 gcc_assert (TREE_OPERAND (t, 0) == var);
11171 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11172 is_doacross ? var : decl,
11173 TREE_OPERAND (t, 1));
11175 gimple_seq *seq;
11176 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11177 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11178 else
11179 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11180 push_gimplify_context ();
11181 gimplify_assign (decl, t, seq);
11182 gimple *bind = NULL;
11183 if (gimplify_ctxp->temps)
11185 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11186 *seq = NULL;
11187 gimplify_seq_add_stmt (seq, bind);
11189 pop_gimplify_context (bind);
11194 BITMAP_FREE (has_decl_expr);
11196 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11198 push_gimplify_context ();
11199 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11201 OMP_FOR_BODY (orig_for_stmt)
11202 = build3 (BIND_EXPR, void_type_node, NULL,
11203 OMP_FOR_BODY (orig_for_stmt), NULL);
11204 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11208 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11209 &for_body);
11211 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11213 if (gimple_code (g) == GIMPLE_BIND)
11214 pop_gimplify_context (g);
11215 else
11216 pop_gimplify_context (NULL);
11219 if (orig_for_stmt != for_stmt)
11220 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11222 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11223 decl = TREE_OPERAND (t, 0);
11224 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11225 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11226 gimplify_omp_ctxp = ctx->outer_context;
11227 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11228 gimplify_omp_ctxp = ctx;
11229 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11230 TREE_OPERAND (t, 0) = var;
11231 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11232 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11233 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11236 gimplify_adjust_omp_clauses (pre_p, for_body,
11237 &OMP_FOR_CLAUSES (orig_for_stmt),
11238 TREE_CODE (orig_for_stmt));
11240 int kind;
11241 switch (TREE_CODE (orig_for_stmt))
11243 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11244 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11245 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11246 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11247 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11248 default:
11249 gcc_unreachable ();
11251 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11252 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11253 for_pre_body);
11254 if (orig_for_stmt != for_stmt)
11255 gimple_omp_for_set_combined_p (gfor, true);
11256 if (gimplify_omp_ctxp
11257 && (gimplify_omp_ctxp->combined_loop
11258 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11259 && gimplify_omp_ctxp->outer_context
11260 && gimplify_omp_ctxp->outer_context->combined_loop)))
11262 gimple_omp_for_set_combined_into_p (gfor, true);
11263 if (gimplify_omp_ctxp->combined_loop)
11264 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11265 else
11266 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11269 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11271 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11272 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11273 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11274 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11275 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11276 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11277 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11278 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11281 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11282 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11283 The outer taskloop stands for computing the number of iterations,
11284 counts for collapsed loops and holding taskloop specific clauses.
11285 The task construct stands for the effect of data sharing on the
11286 explicit task it creates and the inner taskloop stands for expansion
11287 of the static loop inside of the explicit task construct. */
11288 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11290 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11291 tree task_clauses = NULL_TREE;
11292 tree c = *gfor_clauses_ptr;
11293 tree *gtask_clauses_ptr = &task_clauses;
11294 tree outer_for_clauses = NULL_TREE;
11295 tree *gforo_clauses_ptr = &outer_for_clauses;
11296 for (; c; c = OMP_CLAUSE_CHAIN (c))
11297 switch (OMP_CLAUSE_CODE (c))
11299 /* These clauses are allowed on task, move them there. */
11300 case OMP_CLAUSE_SHARED:
11301 case OMP_CLAUSE_FIRSTPRIVATE:
11302 case OMP_CLAUSE_DEFAULT:
11303 case OMP_CLAUSE_IF:
11304 case OMP_CLAUSE_UNTIED:
11305 case OMP_CLAUSE_FINAL:
11306 case OMP_CLAUSE_MERGEABLE:
11307 case OMP_CLAUSE_PRIORITY:
11308 case OMP_CLAUSE_REDUCTION:
11309 case OMP_CLAUSE_IN_REDUCTION:
11310 *gtask_clauses_ptr = c;
11311 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11312 break;
11313 case OMP_CLAUSE_PRIVATE:
11314 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11316 /* We want private on outer for and firstprivate
11317 on task. */
11318 *gtask_clauses_ptr
11319 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11320 OMP_CLAUSE_FIRSTPRIVATE);
11321 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11322 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11323 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11324 *gforo_clauses_ptr = c;
11325 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11327 else
11329 *gtask_clauses_ptr = c;
11330 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11332 break;
11333 /* These clauses go into outer taskloop clauses. */
11334 case OMP_CLAUSE_GRAINSIZE:
11335 case OMP_CLAUSE_NUM_TASKS:
11336 case OMP_CLAUSE_NOGROUP:
11337 *gforo_clauses_ptr = c;
11338 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11339 break;
11340 /* Taskloop clause we duplicate on both taskloops. */
11341 case OMP_CLAUSE_COLLAPSE:
11342 *gfor_clauses_ptr = c;
11343 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11344 *gforo_clauses_ptr = copy_node (c);
11345 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11346 break;
11347 /* For lastprivate, keep the clause on inner taskloop, and add
11348 a shared clause on task. If the same decl is also firstprivate,
11349 add also firstprivate clause on the inner taskloop. */
11350 case OMP_CLAUSE_LASTPRIVATE:
11351 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
11353 /* For taskloop C++ lastprivate IVs, we want:
11354 1) private on outer taskloop
11355 2) firstprivate and shared on task
11356 3) lastprivate on inner taskloop */
11357 *gtask_clauses_ptr
11358 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11359 OMP_CLAUSE_FIRSTPRIVATE);
11360 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11361 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11362 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11363 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11364 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11365 OMP_CLAUSE_PRIVATE);
11366 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11367 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11368 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11369 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11371 *gfor_clauses_ptr = c;
11372 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11373 *gtask_clauses_ptr
11374 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11375 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11376 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11377 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11378 gtask_clauses_ptr
11379 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11380 break;
11381 default:
11382 gcc_unreachable ();
11384 *gfor_clauses_ptr = NULL_TREE;
11385 *gtask_clauses_ptr = NULL_TREE;
11386 *gforo_clauses_ptr = NULL_TREE;
11387 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11388 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11389 NULL_TREE, NULL_TREE, NULL_TREE);
11390 gimple_omp_task_set_taskloop_p (g, true);
11391 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11392 gomp_for *gforo
11393 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11394 gimple_omp_for_collapse (gfor),
11395 gimple_omp_for_pre_body (gfor));
11396 gimple_omp_for_set_pre_body (gfor, NULL);
11397 gimple_omp_for_set_combined_p (gforo, true);
11398 gimple_omp_for_set_combined_into_p (gfor, true);
11399 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11401 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11402 tree v = create_tmp_var (type);
11403 gimple_omp_for_set_index (gforo, i, v);
11404 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11405 gimple_omp_for_set_initial (gforo, i, t);
11406 gimple_omp_for_set_cond (gforo, i,
11407 gimple_omp_for_cond (gfor, i));
11408 t = unshare_expr (gimple_omp_for_final (gfor, i));
11409 gimple_omp_for_set_final (gforo, i, t);
11410 t = unshare_expr (gimple_omp_for_incr (gfor, i));
11411 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11412 TREE_OPERAND (t, 0) = v;
11413 gimple_omp_for_set_incr (gforo, i, t);
11414 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11415 OMP_CLAUSE_DECL (t) = v;
11416 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11417 gimple_omp_for_set_clauses (gforo, t);
11419 gimplify_seq_add_stmt (pre_p, gforo);
11421 else
11422 gimplify_seq_add_stmt (pre_p, gfor);
11423 if (ret != GS_ALL_DONE)
11424 return GS_ERROR;
11425 *expr_p = NULL_TREE;
11426 return GS_ALL_DONE;
11429 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
11430 of OMP_TARGET's body. */
11432 static tree
11433 find_omp_teams (tree *tp, int *walk_subtrees, void *)
11435 *walk_subtrees = 0;
11436 switch (TREE_CODE (*tp))
11438 case OMP_TEAMS:
11439 return *tp;
11440 case BIND_EXPR:
11441 case STATEMENT_LIST:
11442 *walk_subtrees = 1;
11443 break;
11444 default:
11445 break;
11447 return NULL_TREE;
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.
   This is a walk_tree callback: returning non-NULL (*tp) aborts the walk
   and means "not computable on the host"; returning NULL_TREE means this
   subtree is fine.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types themselves carry no runtime computation; skip them.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Only plain integral decls without value-exprs, TLS, side effects
	 or volatility can be evaluated early on the host.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* Globals marked "omp declare target" (or "... link") live on the
	 device, so the host cannot read them up front.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A local VAR_DECL not yet seen in any BIND_EXPR is presumably a
	 temporary created during gimplification of the target body;
	 it has no host value yet.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      /* Consult the data-sharing recorded for the target region:
	 firstprivate and map(always,to:)-style mappings guarantee the
	 host value is the one the region will see.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      /* Integral constants are trivially computable.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only an uninitialized TARGET_EXPR whose slot is a VAR_DECL can be
	 treated like the decl itself; recurse on the slot.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
   the target construct.
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined.  */

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct inside the target: exactly one team.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of the two results this clause feeds.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	/* Non-NULL from the walk means some subexpression is not host
	   computable; record "unknown" (-1).  */
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context *outside* the target
	   region, since it is evaluated on the host; restore the target
	   context afterwards on every path.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* Propagate the gimplified value back into the teams clause,
	   except when the original was a decl or TARGET_EXPR.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Attach the computed results as clauses on the target itself.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
/* Gimplify the gross structure of several OMP constructs: sections,
   single, target, target data, teams and the OpenACC analogues.
   Scans/adjusts the clauses, gimplifies the body in the appropriate
   context and emits the corresponding GIMPLE statement into PRE_P.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code to the OMP region type used while scanning
     clauses and gimplifying the body.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      /* A teams construct not nested (directly) in a target region,
	 or directly in a target region of an "omp declare target"
	 routine, executes on the host.  */
      if (gimplify_omp_ctxp == NULL
	  || (gimplify_omp_ctxp->region_type == ORT_TARGET
	      && gimplify_omp_ctxp->outer_context == NULL
	      && lookup_attribute ("omp declare target",
				   DECL_ATTRIBUTES (current_function_decl))))
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      /* Target-like regions and host teams get their own gimplify
	 context so temporaries stay inside the region's bind.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Data regions need an "end data" runtime call executed even
	     on abnormal exit; wrap the body in TRY_FINALLY.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));

  /* Build the GIMPLE statement for the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  These are standalone directives (no body);
   only their clauses need scanning/adjusting, after which a bodyless
   GIMPLE_OMP_TARGET of the appropriate kind is emitted.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  /* Select the GF_OMP_TARGET_KIND_* subcode; ACC directives also scan
     their clauses in an ACC region type.  */
  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 clause.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
	 semantics apply to all mappings of this OpenACC directive.  */
      bool finalize_marked = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      finalize_marked = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      finalize_marked = true;
	      break;
	    default:
	      /* Check consistency: libgomp relies on the very first data
		 mapping clause being marked, so make sure we did that before
		 any other mapping clauses.  */
	      gcc_assert (finalize_marked);
	      break;
	    }
    }

  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
11853 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
11854 stabilized the lhs of the atomic operation as *ADDR. Return true if
11855 EXPR is this stabilized form. */
11857 static bool
11858 goa_lhs_expr_p (tree expr, tree addr)
11860 /* Also include casts to other type variants. The C front end is fond
11861 of adding these for e.g. volatile variables. This is like
11862 STRIP_TYPE_NOPS but includes the main variant lookup. */
11863 STRIP_USELESS_TYPE_CONVERSION (expr);
11865 if (TREE_CODE (expr) == INDIRECT_REF)
11867 expr = TREE_OPERAND (expr, 0);
11868 while (expr != addr
11869 && (CONVERT_EXPR_P (expr)
11870 || TREE_CODE (expr) == NON_LVALUE_EXPR)
11871 && TREE_CODE (expr) == TREE_CODE (addr)
11872 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
11874 expr = TREE_OPERAND (expr, 0);
11875 addr = TREE_OPERAND (addr, 0);
11877 if (expr == addr)
11878 return true;
11879 return (TREE_CODE (addr) == ADDR_EXPR
11880 && TREE_CODE (expr) == ADDR_EXPR
11881 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
11883 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
11884 return true;
11885 return false;
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the stabilized lhs: substitute the
     temporary that will hold the atomically loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Already a GIMPLE value; nothing to stabilize.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into operands; only specific shapes can contain the lhs.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var);
      break;
    default:
      break;
    }

  /* Lhs nowhere inside: evaluate the whole expression up front into a
     temporary, so it is not re-evaluated inside the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
/* Gimplify an OMP_ATOMIC statement (also the READ/CAPTURE_OLD/CAPTURE_NEW
   variants).  Emits a GIMPLE_OMP_ATOMIC_LOAD of the location into a
   temporary, the stabilized rhs computation, and a
   GIMPLE_OMP_ATOMIC_STORE; for the capture forms *EXPR_P becomes the
   captured value.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* OMP_ATOMIC_READ has no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD receives the atomically loaded old value; occurrences of
     the lhs inside RHS are rewritten to refer to it.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      if (TREE_CODE (rhs) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhs, 2);
	  tree op1 = TREE_OPERAND (rhs, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  /* CAPTURE_OLD must preserve the pristine loaded value, so
	     modify a copy rather than TMP_LOAD itself.  */
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
	  /* Store OP1 into the selected bits of TMP_STORE.  */
	  tree t = build2_loc (EXPR_LOCATION (rhs),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
					   TREE_TYPE (op1), tmp_store, bitsize,
					   bitpos), op1);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ALL_DONE)
	return GS_ERROR;
    }

  /* An atomic read stores back the loaded value unchanged.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, storestmt);
  /* For the value-producing forms, mark which side supplies the value
     and make it the result of the whole expression.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
12046 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
12047 body, and adding some EH bits. */
12049 static enum gimplify_status
12050 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
12052 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
12053 gimple *body_stmt;
12054 gtransaction *trans_stmt;
12055 gimple_seq body = NULL;
12056 int subcode = 0;
12058 /* Wrap the transaction body in a BIND_EXPR so we have a context
12059 where to put decls for OMP. */
12060 if (TREE_CODE (tbody) != BIND_EXPR)
12062 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
12063 TREE_SIDE_EFFECTS (bind) = 1;
12064 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
12065 TRANSACTION_EXPR_BODY (expr) = bind;
12068 push_gimplify_context ();
12069 temp = voidify_wrapper_expr (*expr_p, NULL);
12071 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
12072 pop_gimplify_context (body_stmt);
12074 trans_stmt = gimple_build_transaction (body);
12075 if (TRANSACTION_EXPR_OUTER (expr))
12076 subcode = GTMA_IS_OUTER;
12077 else if (TRANSACTION_EXPR_RELAXED (expr))
12078 subcode = GTMA_IS_RELAXED;
12079 gimple_transaction_set_subcode (trans_stmt, subcode);
12081 gimplify_seq_add_stmt (pre_p, trans_stmt);
12083 if (temp)
12085 *expr_p = temp;
12086 return GS_OK;
12089 *expr_p = NULL_TREE;
12090 return GS_ALL_DONE;
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	/* depend(sink:)/depend(source) require an enclosing loop with an
	   ordered(n) clause; loop_iter_var being empty means there is
	   none.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* loop_iter_var stores pairs; entry 2*i is the user-visible
	       iteration variable of loop I, entry 2*i+1 the variable to
	       substitute for it.  Check each sink decl against its loop
	       and substitute on success.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    /* I now counts the sink decls; it must equal the collapse
	       depth of the enclosing loop nest.  */
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend%> clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) is allowed.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend%> clause with %<source%> "
			  "modifier on an %<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
      /* source and sink modifiers are mutually exclusive on one
	 construct.  */
      if (source_c && sink_c)
	{
	  error_at (OMP_CLAUSE_LOCATION (source_c),
		    "%<depend%> clause with %<source%> modifier specified "
		    "together with %<depend%> clauses with %<sink%> modifier "
		    "on the same construct");
	  failures++;
	}
    }

  /* On any diagnostic, drop the construct entirely.  */
  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
12183 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12184 expression produces a value to be used as an operand inside a GIMPLE
12185 statement, the value will be stored back in *EXPR_P. This value will
12186 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12187 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12188 emitted in PRE_P and POST_P.
12190 Additionally, this process may overwrite parts of the input
12191 expression during gimplification. Ideally, it should be
12192 possible to do non-destructive gimplification.
12194 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12195 the expression needs to evaluate to a value to be used as
12196 an operand in a GIMPLE statement, this value will be stored in
12197 *EXPR_P on exit. This happens when the caller specifies one
12198 of fb_lvalue or fb_rvalue fallback flags.
12200 PRE_P will contain the sequence of GIMPLE statements corresponding
12201 to the evaluation of EXPR and all the side-effects that must
12202 be executed before the main expression. On exit, the last
12203 statement of PRE_P is the core statement being gimplified. For
12204 instance, when gimplifying 'if (++a)' the last statement in
12205 PRE_P will be 'if (t.1)' where t.1 is the result of
12206 pre-incrementing 'a'.
12208 POST_P will contain the sequence of GIMPLE statements corresponding
12209 to the evaluation of all the side-effects that must be executed
12210 after the main expression. If this is NULL, the post
12211 side-effects are stored at the end of PRE_P.
12213 The reason why the output is split in two is to handle post
12214 side-effects explicitly. In some cases, an expression may have
12215 inner and outer post side-effects which need to be emitted in
12216 an order different from the one given by the recursive
12217 traversal. For instance, for the expression (*p--)++ the post
12218 side-effects of '--' must actually occur *after* the post
12219 side-effects of '++'. However, gimplification will first visit
12220 the inner expression, so if a separate POST sequence was not
12221 used, the resulting sequence would be:
12223 1 t.1 = *p
12224 2 p = p - 1
12225 3 t.2 = t.1 + 1
12226 4 *p = t.2
12228 However, the post-decrement operation in line #2 must not be
12229 evaluated until after the store to *p at line #4, so the
12230 correct sequence should be:
12232 1 t.1 = *p
12233 2 t.2 = t.1 + 1
12234 3 *p = t.2
12235 4 p = p - 1
12237 So, by specifying a separate post queue, it is possible
12238 to emit the post side-effects in the correct order.
12239 If POST_P is NULL, an internal queue will be used. Before
12240 returning to the caller, the sequence POST_P is appended to
12241 the main output sequence PRE_P.
12243 GIMPLE_TEST_F points to a function that takes a tree T and
12244 returns nonzero if T is in the GIMPLE form requested by the
12245 caller. The GIMPLE predicates are in gimple.c.
12247 FALLBACK tells the function what sort of a temporary we want if
12248 gimplification cannot produce an expression that complies with
12249 GIMPLE_TEST_F.
12251 fb_none means that no temporary should be generated
12252 fb_rvalue means that an rvalue is OK to generate
12253 fb_lvalue means that an lvalue is OK to generate
12254 fb_either means that either is OK, but an lvalue is preferable.
12255 fb_mayfail means that gimplification may fail (in which case
12256 GS_ERROR will be returned)
12258 The return value is either GS_ERROR or GS_ALL_DONE, since this
12259 function iterates until EXPR is completely gimplified or an error
12260 occurs. */
12262 enum gimplify_status
12263 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12264 bool (*gimple_test_f) (tree), fallback_t fallback)
12266 tree tmp;
12267 gimple_seq internal_pre = NULL;
12268 gimple_seq internal_post = NULL;
12269 tree save_expr;
12270 bool is_statement;
12271 location_t saved_location;
12272 enum gimplify_status ret;
12273 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
12274 tree label;
12276 save_expr = *expr_p;
12277 if (save_expr == NULL_TREE)
12278 return GS_ALL_DONE;
12280 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12281 is_statement = gimple_test_f == is_gimple_stmt;
12282 if (is_statement)
12283 gcc_assert (pre_p);
12285 /* Consistency checks. */
12286 if (gimple_test_f == is_gimple_reg)
12287 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12288 else if (gimple_test_f == is_gimple_val
12289 || gimple_test_f == is_gimple_call_addr
12290 || gimple_test_f == is_gimple_condexpr
12291 || gimple_test_f == is_gimple_mem_rhs
12292 || gimple_test_f == is_gimple_mem_rhs_or_call
12293 || gimple_test_f == is_gimple_reg_rhs
12294 || gimple_test_f == is_gimple_reg_rhs_or_call
12295 || gimple_test_f == is_gimple_asm_val
12296 || gimple_test_f == is_gimple_mem_ref_addr)
12297 gcc_assert (fallback & fb_rvalue);
12298 else if (gimple_test_f == is_gimple_min_lval
12299 || gimple_test_f == is_gimple_lvalue)
12300 gcc_assert (fallback & fb_lvalue);
12301 else if (gimple_test_f == is_gimple_addressable)
12302 gcc_assert (fallback & fb_either);
12303 else if (gimple_test_f == is_gimple_stmt)
12304 gcc_assert (fallback == fb_none);
12305 else
12307 /* We should have recognized the GIMPLE_TEST_F predicate to
12308 know what kind of fallback to use in case a temporary is
12309 needed to hold the value or address of *EXPR_P. */
12310 gcc_unreachable ();
12313 /* We used to check the predicate here and return immediately if it
12314 succeeds. This is wrong; the design is for gimplification to be
12315 idempotent, and for the predicates to only test for valid forms, not
12316 whether they are fully simplified. */
12317 if (pre_p == NULL)
12318 pre_p = &internal_pre;
12320 if (post_p == NULL)
12321 post_p = &internal_post;
12323 /* Remember the last statements added to PRE_P and POST_P. Every
12324 new statement added by the gimplification helpers needs to be
12325 annotated with location information. To centralize the
12326 responsibility, we remember the last statement that had been
12327 added to both queues before gimplifying *EXPR_P. If
12328 gimplification produces new statements in PRE_P and POST_P, those
12329 statements will be annotated with the same location information
12330 as *EXPR_P. */
12331 pre_last_gsi = gsi_last (*pre_p);
12332 post_last_gsi = gsi_last (*post_p);
12334 saved_location = input_location;
12335 if (save_expr != error_mark_node
12336 && EXPR_HAS_LOCATION (*expr_p))
12337 input_location = EXPR_LOCATION (*expr_p);
12339 /* Loop over the specific gimplifiers until the toplevel node
12340 remains the same. */
12343 /* Strip away as many useless type conversions as possible
12344 at the toplevel. */
12345 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
12347 /* Remember the expr. */
12348 save_expr = *expr_p;
12350 /* Die, die, die, my darling. */
12351 if (error_operand_p (save_expr))
12353 ret = GS_ERROR;
12354 break;
12357 /* Do any language-specific gimplification. */
12358 ret = ((enum gimplify_status)
12359 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
12360 if (ret == GS_OK)
12362 if (*expr_p == NULL_TREE)
12363 break;
12364 if (*expr_p != save_expr)
12365 continue;
12367 else if (ret != GS_UNHANDLED)
12368 break;
12370 /* Make sure that all the cases set 'ret' appropriately. */
12371 ret = GS_UNHANDLED;
12372 switch (TREE_CODE (*expr_p))
12374 /* First deal with the special cases. */
12376 case POSTINCREMENT_EXPR:
12377 case POSTDECREMENT_EXPR:
12378 case PREINCREMENT_EXPR:
12379 case PREDECREMENT_EXPR:
12380 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
12381 fallback != fb_none,
12382 TREE_TYPE (*expr_p));
12383 break;
12385 case VIEW_CONVERT_EXPR:
12386 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
12387 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
12389 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12390 post_p, is_gimple_val, fb_rvalue);
12391 recalculate_side_effects (*expr_p);
12392 break;
12394 /* Fallthru. */
12396 case ARRAY_REF:
12397 case ARRAY_RANGE_REF:
12398 case REALPART_EXPR:
12399 case IMAGPART_EXPR:
12400 case COMPONENT_REF:
12401 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
12402 fallback ? fallback : fb_rvalue);
12403 break;
12405 case COND_EXPR:
12406 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
12408 /* C99 code may assign to an array in a structure value of a
12409 conditional expression, and this has undefined behavior
12410 only on execution, so create a temporary if an lvalue is
12411 required. */
12412 if (fallback == fb_lvalue)
12414 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12415 mark_addressable (*expr_p);
12416 ret = GS_OK;
12418 break;
12420 case CALL_EXPR:
12421 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
12423 /* C99 code may assign to an array in a structure returned
12424 from a function, and this has undefined behavior only on
12425 execution, so create a temporary if an lvalue is
12426 required. */
12427 if (fallback == fb_lvalue)
12429 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12430 mark_addressable (*expr_p);
12431 ret = GS_OK;
12433 break;
12435 case TREE_LIST:
12436 gcc_unreachable ();
12438 case COMPOUND_EXPR:
12439 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
12440 break;
12442 case COMPOUND_LITERAL_EXPR:
12443 ret = gimplify_compound_literal_expr (expr_p, pre_p,
12444 gimple_test_f, fallback);
12445 break;
12447 case MODIFY_EXPR:
12448 case INIT_EXPR:
12449 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
12450 fallback != fb_none);
12451 break;
12453 case TRUTH_ANDIF_EXPR:
12454 case TRUTH_ORIF_EXPR:
12456 /* Preserve the original type of the expression and the
12457 source location of the outer expression. */
12458 tree org_type = TREE_TYPE (*expr_p);
12459 *expr_p = gimple_boolify (*expr_p);
12460 *expr_p = build3_loc (input_location, COND_EXPR,
12461 org_type, *expr_p,
12462 fold_convert_loc
12463 (input_location,
12464 org_type, boolean_true_node),
12465 fold_convert_loc
12466 (input_location,
12467 org_type, boolean_false_node));
12468 ret = GS_OK;
12469 break;
12472 case TRUTH_NOT_EXPR:
12474 tree type = TREE_TYPE (*expr_p);
12475 /* The parsers are careful to generate TRUTH_NOT_EXPR
12476 only with operands that are always zero or one.
12477 We do not fold here but handle the only interesting case
12478 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
12479 *expr_p = gimple_boolify (*expr_p);
12480 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
12481 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
12482 TREE_TYPE (*expr_p),
12483 TREE_OPERAND (*expr_p, 0));
12484 else
12485 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
12486 TREE_TYPE (*expr_p),
12487 TREE_OPERAND (*expr_p, 0),
12488 build_int_cst (TREE_TYPE (*expr_p), 1));
12489 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
12490 *expr_p = fold_convert_loc (input_location, type, *expr_p);
12491 ret = GS_OK;
12492 break;
12495 case ADDR_EXPR:
12496 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
12497 break;
12499 case ANNOTATE_EXPR:
12501 tree cond = TREE_OPERAND (*expr_p, 0);
12502 tree kind = TREE_OPERAND (*expr_p, 1);
12503 tree data = TREE_OPERAND (*expr_p, 2);
12504 tree type = TREE_TYPE (cond);
12505 if (!INTEGRAL_TYPE_P (type))
12507 *expr_p = cond;
12508 ret = GS_OK;
12509 break;
12511 tree tmp = create_tmp_var (type);
12512 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
12513 gcall *call
12514 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
12515 gimple_call_set_lhs (call, tmp);
12516 gimplify_seq_add_stmt (pre_p, call);
12517 *expr_p = tmp;
12518 ret = GS_ALL_DONE;
12519 break;
12522 case VA_ARG_EXPR:
12523 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
12524 break;
12526 CASE_CONVERT:
12527 if (IS_EMPTY_STMT (*expr_p))
12529 ret = GS_ALL_DONE;
12530 break;
12533 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
12534 || fallback == fb_none)
12536 /* Just strip a conversion to void (or in void context) and
12537 try again. */
12538 *expr_p = TREE_OPERAND (*expr_p, 0);
12539 ret = GS_OK;
12540 break;
12543 ret = gimplify_conversion (expr_p);
12544 if (ret == GS_ERROR)
12545 break;
12546 if (*expr_p != save_expr)
12547 break;
12548 /* FALLTHRU */
12550 case FIX_TRUNC_EXPR:
12551 /* unary_expr: ... | '(' cast ')' val | ... */
12552 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12553 is_gimple_val, fb_rvalue);
12554 recalculate_side_effects (*expr_p);
12555 break;
12557 case INDIRECT_REF:
12559 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
12560 bool notrap = TREE_THIS_NOTRAP (*expr_p);
12561 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12563 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12564 if (*expr_p != save_expr)
12566 ret = GS_OK;
12567 break;
12570 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12571 is_gimple_reg, fb_rvalue);
12572 if (ret == GS_ERROR)
12573 break;
12575 recalculate_side_effects (*expr_p);
12576 *expr_p = fold_build2_loc (input_location, MEM_REF,
12577 TREE_TYPE (*expr_p),
12578 TREE_OPERAND (*expr_p, 0),
12579 build_int_cst (saved_ptr_type, 0));
12580 TREE_THIS_VOLATILE (*expr_p) = volatilep;
12581 TREE_THIS_NOTRAP (*expr_p) = notrap;
12582 ret = GS_OK;
12583 break;
12586 /* We arrive here through the various re-gimplifcation paths. */
12587 case MEM_REF:
12588 /* First try re-folding the whole thing. */
12589 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12590 TREE_OPERAND (*expr_p, 0),
12591 TREE_OPERAND (*expr_p, 1));
12592 if (tmp)
12594 REF_REVERSE_STORAGE_ORDER (tmp)
12595 = REF_REVERSE_STORAGE_ORDER (*expr_p);
12596 *expr_p = tmp;
12597 recalculate_side_effects (*expr_p);
12598 ret = GS_OK;
12599 break;
12601 /* Avoid re-gimplifying the address operand if it is already
12602 in suitable form. Re-gimplifying would mark the address
12603 operand addressable. Always gimplify when not in SSA form
12604 as we still may have to gimplify decls with value-exprs. */
12605 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
12606 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12608 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12609 is_gimple_mem_ref_addr, fb_rvalue);
12610 if (ret == GS_ERROR)
12611 break;
12613 recalculate_side_effects (*expr_p);
12614 ret = GS_ALL_DONE;
12615 break;
12617 /* Constants need not be gimplified. */
12618 case INTEGER_CST:
12619 case REAL_CST:
12620 case FIXED_CST:
12621 case STRING_CST:
12622 case COMPLEX_CST:
12623 case VECTOR_CST:
12624 /* Drop the overflow flag on constants, we do not want
12625 that in the GIMPLE IL. */
12626 if (TREE_OVERFLOW_P (*expr_p))
12627 *expr_p = drop_tree_overflow (*expr_p);
12628 ret = GS_ALL_DONE;
12629 break;
12631 case CONST_DECL:
12632 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12633 CONST_DECL node. Otherwise the decl is replaceable by its
12634 value. */
12635 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12636 if (fallback & fb_lvalue)
12637 ret = GS_ALL_DONE;
12638 else
12640 *expr_p = DECL_INITIAL (*expr_p);
12641 ret = GS_OK;
12643 break;
12645 case DECL_EXPR:
12646 ret = gimplify_decl_expr (expr_p, pre_p);
12647 break;
12649 case BIND_EXPR:
12650 ret = gimplify_bind_expr (expr_p, pre_p);
12651 break;
12653 case LOOP_EXPR:
12654 ret = gimplify_loop_expr (expr_p, pre_p);
12655 break;
12657 case SWITCH_EXPR:
12658 ret = gimplify_switch_expr (expr_p, pre_p);
12659 break;
12661 case EXIT_EXPR:
12662 ret = gimplify_exit_expr (expr_p);
12663 break;
12665 case GOTO_EXPR:
12666 /* If the target is not LABEL, then it is a computed jump
12667 and the target needs to be gimplified. */
12668 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
12670 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12671 NULL, is_gimple_val, fb_rvalue);
12672 if (ret == GS_ERROR)
12673 break;
12675 gimplify_seq_add_stmt (pre_p,
12676 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
12677 ret = GS_ALL_DONE;
12678 break;
12680 case PREDICT_EXPR:
12681 gimplify_seq_add_stmt (pre_p,
12682 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12683 PREDICT_EXPR_OUTCOME (*expr_p)));
12684 ret = GS_ALL_DONE;
12685 break;
12687 case LABEL_EXPR:
12688 ret = gimplify_label_expr (expr_p, pre_p);
12689 label = LABEL_EXPR_LABEL (*expr_p);
12690 gcc_assert (decl_function_context (label) == current_function_decl);
12692 /* If the label is used in a goto statement, or address of the label
12693 is taken, we need to unpoison all variables that were seen so far.
12694 Doing so would prevent us from reporting a false positives. */
12695 if (asan_poisoned_variables
12696 && asan_used_labels != NULL
12697 && asan_used_labels->contains (label))
12698 asan_poison_variables (asan_poisoned_variables, false, pre_p);
12699 break;
12701 case CASE_LABEL_EXPR:
12702 ret = gimplify_case_label_expr (expr_p, pre_p);
12704 if (gimplify_ctxp->live_switch_vars)
12705 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12706 pre_p);
12707 break;
12709 case RETURN_EXPR:
12710 ret = gimplify_return_expr (*expr_p, pre_p);
12711 break;
12713 case CONSTRUCTOR:
12714 /* Don't reduce this in place; let gimplify_init_constructor work its
12715 magic. Buf if we're just elaborating this for side effects, just
12716 gimplify any element that has side-effects. */
12717 if (fallback == fb_none)
12719 unsigned HOST_WIDE_INT ix;
12720 tree val;
12721 tree temp = NULL_TREE;
12722 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12723 if (TREE_SIDE_EFFECTS (val))
12724 append_to_statement_list (val, &temp);
12726 *expr_p = temp;
12727 ret = temp ? GS_OK : GS_ALL_DONE;
12729 /* C99 code may assign to an array in a constructed
12730 structure or union, and this has undefined behavior only
12731 on execution, so create a temporary if an lvalue is
12732 required. */
12733 else if (fallback == fb_lvalue)
12735 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12736 mark_addressable (*expr_p);
12737 ret = GS_OK;
12739 else
12740 ret = GS_ALL_DONE;
12741 break;
12743 /* The following are special cases that are not handled by the
12744 original GIMPLE grammar. */
12746 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
12747 eliminated. */
12748 case SAVE_EXPR:
12749 ret = gimplify_save_expr (expr_p, pre_p, post_p);
12750 break;
12752 case BIT_FIELD_REF:
12753 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12754 post_p, is_gimple_lvalue, fb_either);
12755 recalculate_side_effects (*expr_p);
12756 break;
12758 case TARGET_MEM_REF:
12760 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
12762 if (TMR_BASE (*expr_p))
12763 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
12764 post_p, is_gimple_mem_ref_addr, fb_either);
12765 if (TMR_INDEX (*expr_p))
12766 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
12767 post_p, is_gimple_val, fb_rvalue);
12768 if (TMR_INDEX2 (*expr_p))
12769 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
12770 post_p, is_gimple_val, fb_rvalue);
12771 /* TMR_STEP and TMR_OFFSET are always integer constants. */
12772 ret = MIN (r0, r1);
12774 break;
12776 case NON_LVALUE_EXPR:
12777 /* This should have been stripped above. */
12778 gcc_unreachable ();
12780 case ASM_EXPR:
12781 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
12782 break;
12784 case TRY_FINALLY_EXPR:
12785 case TRY_CATCH_EXPR:
12787 gimple_seq eval, cleanup;
12788 gtry *try_;
12790 /* Calls to destructors are generated automatically in FINALLY/CATCH
12791 block. They should have location as UNKNOWN_LOCATION. However,
12792 gimplify_call_expr will reset these call stmts to input_location
12793 if it finds stmt's location is unknown. To prevent resetting for
12794 destructors, we set the input_location to unknown.
12795 Note that this only affects the destructor calls in FINALLY/CATCH
12796 block, and will automatically reset to its original value by the
12797 end of gimplify_expr. */
12798 input_location = UNKNOWN_LOCATION;
12799 eval = cleanup = NULL;
12800 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
12801 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
12802 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
12803 if (gimple_seq_empty_p (cleanup))
12805 gimple_seq_add_seq (pre_p, eval);
12806 ret = GS_ALL_DONE;
12807 break;
12809 try_ = gimple_build_try (eval, cleanup,
12810 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
12811 ? GIMPLE_TRY_FINALLY
12812 : GIMPLE_TRY_CATCH);
12813 if (EXPR_HAS_LOCATION (save_expr))
12814 gimple_set_location (try_, EXPR_LOCATION (save_expr));
12815 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
12816 gimple_set_location (try_, saved_location);
12817 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
12818 gimple_try_set_catch_is_cleanup (try_,
12819 TRY_CATCH_IS_CLEANUP (*expr_p));
12820 gimplify_seq_add_stmt (pre_p, try_);
12821 ret = GS_ALL_DONE;
12822 break;
12825 case CLEANUP_POINT_EXPR:
12826 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
12827 break;
12829 case TARGET_EXPR:
12830 ret = gimplify_target_expr (expr_p, pre_p, post_p);
12831 break;
12833 case CATCH_EXPR:
12835 gimple *c;
12836 gimple_seq handler = NULL;
12837 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
12838 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
12839 gimplify_seq_add_stmt (pre_p, c);
12840 ret = GS_ALL_DONE;
12841 break;
12844 case EH_FILTER_EXPR:
12846 gimple *ehf;
12847 gimple_seq failure = NULL;
12849 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
12850 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
12851 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
12852 gimplify_seq_add_stmt (pre_p, ehf);
12853 ret = GS_ALL_DONE;
12854 break;
12857 case OBJ_TYPE_REF:
12859 enum gimplify_status r0, r1;
12860 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
12861 post_p, is_gimple_val, fb_rvalue);
12862 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
12863 post_p, is_gimple_val, fb_rvalue);
12864 TREE_SIDE_EFFECTS (*expr_p) = 0;
12865 ret = MIN (r0, r1);
12867 break;
12869 case LABEL_DECL:
12870 /* We get here when taking the address of a label. We mark
12871 the label as "forced"; meaning it can never be removed and
12872 it is a potential target for any computed goto. */
12873 FORCED_LABEL (*expr_p) = 1;
12874 ret = GS_ALL_DONE;
12875 break;
12877 case STATEMENT_LIST:
12878 ret = gimplify_statement_list (expr_p, pre_p);
12879 break;
12881 case WITH_SIZE_EXPR:
12883 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12884 post_p == &internal_post ? NULL : post_p,
12885 gimple_test_f, fallback);
12886 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12887 is_gimple_val, fb_rvalue);
12888 ret = GS_ALL_DONE;
12890 break;
12892 case VAR_DECL:
12893 case PARM_DECL:
12894 ret = gimplify_var_or_parm_decl (expr_p);
12895 break;
12897 case RESULT_DECL:
12898 /* When within an OMP context, notice uses of variables. */
12899 if (gimplify_omp_ctxp)
12900 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12901 ret = GS_ALL_DONE;
12902 break;
12904 case DEBUG_EXPR_DECL:
12905 gcc_unreachable ();
12907 case DEBUG_BEGIN_STMT:
12908 gimplify_seq_add_stmt (pre_p,
12909 gimple_build_debug_begin_stmt
12910 (TREE_BLOCK (*expr_p),
12911 EXPR_LOCATION (*expr_p)));
12912 ret = GS_ALL_DONE;
12913 *expr_p = NULL;
12914 break;
12916 case SSA_NAME:
12917 /* Allow callbacks into the gimplifier during optimization. */
12918 ret = GS_ALL_DONE;
12919 break;
12921 case OMP_PARALLEL:
12922 gimplify_omp_parallel (expr_p, pre_p);
12923 ret = GS_ALL_DONE;
12924 break;
12926 case OMP_TASK:
12927 gimplify_omp_task (expr_p, pre_p);
12928 ret = GS_ALL_DONE;
12929 break;
12931 case OMP_FOR:
12932 case OMP_SIMD:
12933 case OMP_DISTRIBUTE:
12934 case OMP_TASKLOOP:
12935 case OACC_LOOP:
12936 ret = gimplify_omp_for (expr_p, pre_p);
12937 break;
12939 case OACC_CACHE:
12940 gimplify_oacc_cache (expr_p, pre_p);
12941 ret = GS_ALL_DONE;
12942 break;
12944 case OACC_DECLARE:
12945 gimplify_oacc_declare (expr_p, pre_p);
12946 ret = GS_ALL_DONE;
12947 break;
12949 case OACC_HOST_DATA:
12950 case OACC_DATA:
12951 case OACC_KERNELS:
12952 case OACC_PARALLEL:
12953 case OMP_SECTIONS:
12954 case OMP_SINGLE:
12955 case OMP_TARGET:
12956 case OMP_TARGET_DATA:
12957 case OMP_TEAMS:
12958 gimplify_omp_workshare (expr_p, pre_p);
12959 ret = GS_ALL_DONE;
12960 break;
12962 case OACC_ENTER_DATA:
12963 case OACC_EXIT_DATA:
12964 case OACC_UPDATE:
12965 case OMP_TARGET_UPDATE:
12966 case OMP_TARGET_ENTER_DATA:
12967 case OMP_TARGET_EXIT_DATA:
12968 gimplify_omp_target_update (expr_p, pre_p);
12969 ret = GS_ALL_DONE;
12970 break;
12972 case OMP_SECTION:
12973 case OMP_MASTER:
12974 case OMP_ORDERED:
12975 case OMP_CRITICAL:
12977 gimple_seq body = NULL;
12978 gimple *g;
12980 gimplify_and_add (OMP_BODY (*expr_p), &body);
12981 switch (TREE_CODE (*expr_p))
12983 case OMP_SECTION:
12984 g = gimple_build_omp_section (body);
12985 break;
12986 case OMP_MASTER:
12987 g = gimple_build_omp_master (body);
12988 break;
12989 case OMP_ORDERED:
12990 g = gimplify_omp_ordered (*expr_p, body);
12991 break;
12992 case OMP_CRITICAL:
12993 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
12994 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
12995 gimplify_adjust_omp_clauses (pre_p, body,
12996 &OMP_CRITICAL_CLAUSES (*expr_p),
12997 OMP_CRITICAL);
12998 g = gimple_build_omp_critical (body,
12999 OMP_CRITICAL_NAME (*expr_p),
13000 OMP_CRITICAL_CLAUSES (*expr_p));
13001 break;
13002 default:
13003 gcc_unreachable ();
13005 gimplify_seq_add_stmt (pre_p, g);
13006 ret = GS_ALL_DONE;
13007 break;
13010 case OMP_TASKGROUP:
13012 gimple_seq body = NULL;
13014 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
13015 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
13016 OMP_TASKGROUP);
13017 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
13018 gimplify_and_add (OMP_BODY (*expr_p), &body);
13019 gimple_seq cleanup = NULL;
13020 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
13021 gimple *g = gimple_build_call (fn, 0);
13022 gimple_seq_add_stmt (&cleanup, g);
13023 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13024 body = NULL;
13025 gimple_seq_add_stmt (&body, g);
13026 g = gimple_build_omp_taskgroup (body, *pclauses);
13027 gimplify_seq_add_stmt (pre_p, g);
13028 ret = GS_ALL_DONE;
13029 break;
13032 case OMP_ATOMIC:
13033 case OMP_ATOMIC_READ:
13034 case OMP_ATOMIC_CAPTURE_OLD:
13035 case OMP_ATOMIC_CAPTURE_NEW:
13036 ret = gimplify_omp_atomic (expr_p, pre_p);
13037 break;
13039 case TRANSACTION_EXPR:
13040 ret = gimplify_transaction (expr_p, pre_p);
13041 break;
13043 case TRUTH_AND_EXPR:
13044 case TRUTH_OR_EXPR:
13045 case TRUTH_XOR_EXPR:
13047 tree orig_type = TREE_TYPE (*expr_p);
13048 tree new_type, xop0, xop1;
13049 *expr_p = gimple_boolify (*expr_p);
13050 new_type = TREE_TYPE (*expr_p);
13051 if (!useless_type_conversion_p (orig_type, new_type))
13053 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
13054 ret = GS_OK;
13055 break;
13058 /* Boolified binary truth expressions are semantically equivalent
13059 to bitwise binary expressions. Canonicalize them to the
13060 bitwise variant. */
13061 switch (TREE_CODE (*expr_p))
13063 case TRUTH_AND_EXPR:
13064 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
13065 break;
13066 case TRUTH_OR_EXPR:
13067 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
13068 break;
13069 case TRUTH_XOR_EXPR:
13070 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
13071 break;
13072 default:
13073 break;
13075 /* Now make sure that operands have compatible type to
13076 expression's new_type. */
13077 xop0 = TREE_OPERAND (*expr_p, 0);
13078 xop1 = TREE_OPERAND (*expr_p, 1);
13079 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
13080 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
13081 new_type,
13082 xop0);
13083 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
13084 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
13085 new_type,
13086 xop1);
13087 /* Continue classified as tcc_binary. */
13088 goto expr_2;
13091 case VEC_COND_EXPR:
13093 enum gimplify_status r0, r1, r2;
13095 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13096 post_p, is_gimple_condexpr, fb_rvalue);
13097 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13098 post_p, is_gimple_val, fb_rvalue);
13099 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13100 post_p, is_gimple_val, fb_rvalue);
13102 ret = MIN (MIN (r0, r1), r2);
13103 recalculate_side_effects (*expr_p);
13105 break;
13107 case VEC_PERM_EXPR:
13108 /* Classified as tcc_expression. */
13109 goto expr_3;
13111 case BIT_INSERT_EXPR:
13112 /* Argument 3 is a constant. */
13113 goto expr_2;
13115 case POINTER_PLUS_EXPR:
13117 enum gimplify_status r0, r1;
13118 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13119 post_p, is_gimple_val, fb_rvalue);
13120 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13121 post_p, is_gimple_val, fb_rvalue);
13122 recalculate_side_effects (*expr_p);
13123 ret = MIN (r0, r1);
13124 break;
13127 default:
13128 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
13130 case tcc_comparison:
13131 /* Handle comparison of objects of non scalar mode aggregates
13132 with a call to memcmp. It would be nice to only have to do
13133 this for variable-sized objects, but then we'd have to allow
13134 the same nest of reference nodes we allow for MODIFY_EXPR and
13135 that's too complex.
13137 Compare scalar mode aggregates as scalar mode values. Using
13138 memcmp for them would be very inefficient at best, and is
13139 plain wrong if bitfields are involved. */
13141 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
13143 /* Vector comparisons need no boolification. */
13144 if (TREE_CODE (type) == VECTOR_TYPE)
13145 goto expr_2;
13146 else if (!AGGREGATE_TYPE_P (type))
13148 tree org_type = TREE_TYPE (*expr_p);
13149 *expr_p = gimple_boolify (*expr_p);
13150 if (!useless_type_conversion_p (org_type,
13151 TREE_TYPE (*expr_p)))
13153 *expr_p = fold_convert_loc (input_location,
13154 org_type, *expr_p);
13155 ret = GS_OK;
13157 else
13158 goto expr_2;
13160 else if (TYPE_MODE (type) != BLKmode)
13161 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13162 else
13163 ret = gimplify_variable_sized_compare (expr_p);
13165 break;
13168 /* If *EXPR_P does not need to be special-cased, handle it
13169 according to its class. */
13170 case tcc_unary:
13171 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13172 post_p, is_gimple_val, fb_rvalue);
13173 break;
13175 case tcc_binary:
13176 expr_2:
13178 enum gimplify_status r0, r1;
13180 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13181 post_p, is_gimple_val, fb_rvalue);
13182 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13183 post_p, is_gimple_val, fb_rvalue);
13185 ret = MIN (r0, r1);
13186 break;
13189 expr_3:
13191 enum gimplify_status r0, r1, r2;
13193 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13194 post_p, is_gimple_val, fb_rvalue);
13195 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13196 post_p, is_gimple_val, fb_rvalue);
13197 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13198 post_p, is_gimple_val, fb_rvalue);
13200 ret = MIN (MIN (r0, r1), r2);
13201 break;
13204 case tcc_declaration:
13205 case tcc_constant:
13206 ret = GS_ALL_DONE;
13207 goto dont_recalculate;
13209 default:
13210 gcc_unreachable ();
13213 recalculate_side_effects (*expr_p);
13215 dont_recalculate:
13216 break;
13219 gcc_assert (*expr_p || ret != GS_OK);
13221 while (ret == GS_OK);
13223 /* If we encountered an error_mark somewhere nested inside, either
13224 stub out the statement or propagate the error back out. */
13225 if (ret == GS_ERROR)
13227 if (is_statement)
13228 *expr_p = NULL;
13229 goto out;
13232 /* This was only valid as a return value from the langhook, which
13233 we handled. Make sure it doesn't escape from any other context. */
13234 gcc_assert (ret != GS_UNHANDLED);
13236 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
13238 /* We aren't looking for a value, and we don't have a valid
13239 statement. If it doesn't have side-effects, throw it away.
13240 We can also get here with code such as "*&&L;", where L is
13241 a LABEL_DECL that is marked as FORCED_LABEL. */
13242 if (TREE_CODE (*expr_p) == LABEL_DECL
13243 || !TREE_SIDE_EFFECTS (*expr_p))
13244 *expr_p = NULL;
13245 else if (!TREE_THIS_VOLATILE (*expr_p))
13247 /* This is probably a _REF that contains something nested that
13248 has side effects. Recurse through the operands to find it. */
13249 enum tree_code code = TREE_CODE (*expr_p);
13251 switch (code)
13253 case COMPONENT_REF:
13254 case REALPART_EXPR:
13255 case IMAGPART_EXPR:
13256 case VIEW_CONVERT_EXPR:
13257 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13258 gimple_test_f, fallback);
13259 break;
13261 case ARRAY_REF:
13262 case ARRAY_RANGE_REF:
13263 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13264 gimple_test_f, fallback);
13265 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13266 gimple_test_f, fallback);
13267 break;
13269 default:
13270 /* Anything else with side-effects must be converted to
13271 a valid statement before we get here. */
13272 gcc_unreachable ();
13275 *expr_p = NULL;
13277 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13278 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
13280 /* Historically, the compiler has treated a bare reference
13281 to a non-BLKmode volatile lvalue as forcing a load. */
13282 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
13284 /* Normally, we do not want to create a temporary for a
13285 TREE_ADDRESSABLE type because such a type should not be
13286 copied by bitwise-assignment. However, we make an
13287 exception here, as all we are doing here is ensuring that
13288 we read the bytes that make up the type. We use
13289 create_tmp_var_raw because create_tmp_var will abort when
13290 given a TREE_ADDRESSABLE type. */
13291 tree tmp = create_tmp_var_raw (type, "vol");
13292 gimple_add_tmp_var (tmp);
13293 gimplify_assign (tmp, *expr_p, pre_p);
13294 *expr_p = NULL;
13296 else
13297 /* We can't do anything useful with a volatile reference to
13298 an incomplete type, so just throw it away. Likewise for
13299 a BLKmode type, since any implicit inner load should
13300 already have been turned into an explicit one by the
13301 gimplification process. */
13302 *expr_p = NULL;
13305 /* If we are gimplifying at the statement level, we're done. Tack
13306 everything together and return. */
13307 if (fallback == fb_none || is_statement)
13309 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13310 it out for GC to reclaim it. */
13311 *expr_p = NULL_TREE;
13313 if (!gimple_seq_empty_p (internal_pre)
13314 || !gimple_seq_empty_p (internal_post))
13316 gimplify_seq_add_seq (&internal_pre, internal_post);
13317 gimplify_seq_add_seq (pre_p, internal_pre);
13320 /* The result of gimplifying *EXPR_P is going to be the last few
13321 statements in *PRE_P and *POST_P. Add location information
13322 to all the statements that were added by the gimplification
13323 helpers. */
13324 if (!gimple_seq_empty_p (*pre_p))
13325 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
13327 if (!gimple_seq_empty_p (*post_p))
13328 annotate_all_with_location_after (*post_p, post_last_gsi,
13329 input_location);
13331 goto out;
13334 #ifdef ENABLE_GIMPLE_CHECKING
13335 if (*expr_p)
13337 enum tree_code code = TREE_CODE (*expr_p);
13338 /* These expressions should already be in gimple IR form. */
13339 gcc_assert (code != MODIFY_EXPR
13340 && code != ASM_EXPR
13341 && code != BIND_EXPR
13342 && code != CATCH_EXPR
13343 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
13344 && code != EH_FILTER_EXPR
13345 && code != GOTO_EXPR
13346 && code != LABEL_EXPR
13347 && code != LOOP_EXPR
13348 && code != SWITCH_EXPR
13349 && code != TRY_FINALLY_EXPR
13350 && code != OACC_PARALLEL
13351 && code != OACC_KERNELS
13352 && code != OACC_DATA
13353 && code != OACC_HOST_DATA
13354 && code != OACC_DECLARE
13355 && code != OACC_UPDATE
13356 && code != OACC_ENTER_DATA
13357 && code != OACC_EXIT_DATA
13358 && code != OACC_CACHE
13359 && code != OMP_CRITICAL
13360 && code != OMP_FOR
13361 && code != OACC_LOOP
13362 && code != OMP_MASTER
13363 && code != OMP_TASKGROUP
13364 && code != OMP_ORDERED
13365 && code != OMP_PARALLEL
13366 && code != OMP_SECTIONS
13367 && code != OMP_SECTION
13368 && code != OMP_SINGLE);
13370 #endif
13372 /* Otherwise we're gimplifying a subexpression, so the resulting
13373 value is interesting. If it's a valid operand that matches
13374 GIMPLE_TEST_F, we're done. Unless we are handling some
13375 post-effects internally; if that's the case, we need to copy into
13376 a temporary before adding the post-effects to POST_P. */
13377 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
13378 goto out;
13380 /* Otherwise, we need to create a new temporary for the gimplified
13381 expression. */
13383 /* We can't return an lvalue if we have an internal postqueue. The
13384 object the lvalue refers to would (probably) be modified by the
13385 postqueue; we need to copy the value out first, which means an
13386 rvalue. */
13387 if ((fallback & fb_lvalue)
13388 && gimple_seq_empty_p (internal_post)
13389 && is_gimple_addressable (*expr_p))
13391 /* An lvalue will do. Take the address of the expression, store it
13392 in a temporary, and replace the expression with an INDIRECT_REF of
13393 that temporary. */
13394 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
13395 unsigned int ref_align = get_object_alignment (*expr_p);
13396 tree ref_type = TREE_TYPE (*expr_p);
13397 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
13398 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
13399 if (TYPE_ALIGN (ref_type) != ref_align)
13400 ref_type = build_aligned_type (ref_type, ref_align);
13401 *expr_p = build2 (MEM_REF, ref_type,
13402 tmp, build_zero_cst (ref_alias_type));
13404 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
13406 /* An rvalue will do. Assign the gimplified expression into a
13407 new temporary TMP and replace the original expression with
13408 TMP. First, make sure that the expression has a type so that
13409 it can be assigned into a temporary. */
13410 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
13411 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
13413 else
13415 #ifdef ENABLE_GIMPLE_CHECKING
13416 if (!(fallback & fb_mayfail))
13418 fprintf (stderr, "gimplification failed:\n");
13419 print_generic_expr (stderr, *expr_p);
13420 debug_tree (*expr_p);
13421 internal_error ("gimplification failed");
13423 #endif
13424 gcc_assert (fallback & fb_mayfail);
13426 /* If this is an asm statement, and the user asked for the
13427 impossible, don't die. Fail and let gimplify_asm_expr
13428 issue an error. */
13429 ret = GS_ERROR;
13430 goto out;
13433 /* Make sure the temporary matches our predicate. */
13434 gcc_assert ((*gimple_test_f) (*expr_p));
13436 if (!gimple_seq_empty_p (internal_post))
13438 annotate_all_with_location (internal_post, input_location);
13439 gimplify_seq_add_seq (pre_p, internal_post);
13442 out:
13443 input_location = saved_location;
13444 return ret;
13447 /* Like gimplify_expr but make sure the gimplified result is not itself
13448 a SSA name (but a decl if it were). Temporaries required by
13449 evaluating *EXPR_P may be still SSA names. */
13451 static enum gimplify_status
13452 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13453 bool (*gimple_test_f) (tree), fallback_t fallback,
13454 bool allow_ssa)
13456 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13457 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13458 gimple_test_f, fallback);
13459 if (! allow_ssa
13460 && TREE_CODE (*expr_p) == SSA_NAME)
13462 tree name = *expr_p;
13463 if (was_ssa_name_p)
13464 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13465 else
13467 /* Avoid the extra copy if possible. */
13468 *expr_p = create_tmp_reg (TREE_TYPE (name));
13469 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13470 release_ssa_name (name);
13473 return ret;
13476 /* Look through TYPE for variable-sized objects and gimplify each such
13477 size that we find. Add to LIST_P any statements generated. */
13479 void
13480 gimplify_type_sizes (tree type, gimple_seq *list_p)
13482 tree field, t;
13484 if (type == NULL || type == error_mark_node)
13485 return;
13487 /* We first do the main variant, then copy into any other variants. */
13488 type = TYPE_MAIN_VARIANT (type);
13490 /* Avoid infinite recursion. */
13491 if (TYPE_SIZES_GIMPLIFIED (type))
13492 return;
13494 TYPE_SIZES_GIMPLIFIED (type) = 1;
13496 switch (TREE_CODE (type))
13498 case INTEGER_TYPE:
13499 case ENUMERAL_TYPE:
13500 case BOOLEAN_TYPE:
13501 case REAL_TYPE:
13502 case FIXED_POINT_TYPE:
13503 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
13504 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
13506 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13508 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
13509 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
13511 break;
13513 case ARRAY_TYPE:
13514 /* These types may not have declarations, so handle them here. */
13515 gimplify_type_sizes (TREE_TYPE (type), list_p);
13516 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
13517 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
13518 with assigned stack slots, for -O1+ -g they should be tracked
13519 by VTA. */
13520 if (!(TYPE_NAME (type)
13521 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13522 && DECL_IGNORED_P (TYPE_NAME (type)))
13523 && TYPE_DOMAIN (type)
13524 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
13526 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
13527 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13528 DECL_IGNORED_P (t) = 0;
13529 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
13530 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13531 DECL_IGNORED_P (t) = 0;
13533 break;
13535 case RECORD_TYPE:
13536 case UNION_TYPE:
13537 case QUAL_UNION_TYPE:
13538 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
13539 if (TREE_CODE (field) == FIELD_DECL)
13541 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
13542 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
13543 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
13544 gimplify_type_sizes (TREE_TYPE (field), list_p);
13546 break;
13548 case POINTER_TYPE:
13549 case REFERENCE_TYPE:
13550 /* We used to recurse on the pointed-to type here, which turned out to
13551 be incorrect because its definition might refer to variables not
13552 yet initialized at this point if a forward declaration is involved.
13554 It was actually useful for anonymous pointed-to types to ensure
13555 that the sizes evaluation dominates every possible later use of the
13556 values. Restricting to such types here would be safe since there
13557 is no possible forward declaration around, but would introduce an
13558 undesirable middle-end semantic to anonymity. We then defer to
13559 front-ends the responsibility of ensuring that the sizes are
13560 evaluated both early and late enough, e.g. by attaching artificial
13561 type declarations to the tree. */
13562 break;
13564 default:
13565 break;
13568 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
13569 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
13571 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13573 TYPE_SIZE (t) = TYPE_SIZE (type);
13574 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
13575 TYPE_SIZES_GIMPLIFIED (t) = 1;
13579 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13580 a size or position, has had all of its SAVE_EXPRs evaluated.
13581 We add any required statements to *STMT_P. */
13583 void
13584 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
13586 tree expr = *expr_p;
13588 /* We don't do anything if the value isn't there, is constant, or contains
13589 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
13590 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
13591 will want to replace it with a new variable, but that will cause problems
13592 if this type is from outside the function. It's OK to have that here. */
13593 if (expr == NULL_TREE
13594 || is_gimple_constant (expr)
13595 || TREE_CODE (expr) == VAR_DECL
13596 || CONTAINS_PLACEHOLDER_P (expr))
13597 return;
13599 *expr_p = unshare_expr (expr);
13601 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13602 if the def vanishes. */
13603 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
13605 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
13606 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
13607 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
13608 if (is_gimple_constant (*expr_p))
13609 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
13612 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
13613 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
13614 is true, also gimplify the parameters. */
13616 gbind *
13617 gimplify_body (tree fndecl, bool do_parms)
13619 location_t saved_location = input_location;
13620 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
13621 gimple *outer_stmt;
13622 gbind *outer_bind;
13624 timevar_push (TV_TREE_GIMPLIFY);
13626 init_tree_ssa (cfun);
13628 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
13629 gimplification. */
13630 default_rtl_profile ();
13632 gcc_assert (gimplify_ctxp == NULL);
13633 push_gimplify_context (true);
/* Under OpenACC/OpenMP, a function carrying the "omp declare target"
   attribute is gimplified inside an implicit ORT_TARGET context.  */
13635 if (flag_openacc || flag_openmp)
13637 gcc_assert (gimplify_omp_ctxp == NULL);
13638 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
13639 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
13642 /* Unshare most shared trees in the body and in that of any nested functions.
13643 It would seem we don't have to do this for nested functions because
13644 they are supposed to be output and then the outer function gimplified
13645 first, but the g++ front end doesn't always do it that way. */
13646 unshare_body (fndecl);
13647 unvisit_body (fndecl);
13649 /* Make sure input_location isn't set to something weird. */
13650 input_location = DECL_SOURCE_LOCATION (fndecl);
13652 /* Resolve callee-copies. This has to be done before processing
13653 the body so that DECL_VALUE_EXPR gets processed correctly. */
13654 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
13656 /* Gimplify the function's body. */
13657 seq = NULL;
13658 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
13659 outer_stmt = gimple_seq_first_stmt (seq);
/* An empty gimplified body still needs one statement to wrap below.  */
13660 if (!outer_stmt)
13662 outer_stmt = gimple_build_nop ();
13663 gimplify_seq_add_stmt (&seq, outer_stmt);
13666 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
13667 not the case, wrap everything in a GIMPLE_BIND to make it so. */
13668 if (gimple_code (outer_stmt) == GIMPLE_BIND
13669 && gimple_seq_first (seq) == gimple_seq_last (seq))
13670 outer_bind = as_a <gbind *> (outer_stmt);
13671 else
13672 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
13674 DECL_SAVED_TREE (fndecl) = NULL_TREE;
13676 /* If we had callee-copies statements, insert them at the beginning
13677 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
13678 if (!gimple_seq_empty_p (parm_stmts))
13680 tree parm;
13682 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
/* Run any parameter cleanups in a try/finally around the whole body.  */
13683 if (parm_cleanup)
13685 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
13686 GIMPLE_TRY_FINALLY);
13687 parm_stmts = NULL;
13688 gimple_seq_add_stmt (&parm_stmts, g);
13690 gimple_bind_set_body (outer_bind, parm_stmts);
13692 for (parm = DECL_ARGUMENTS (current_function_decl);
13693 parm; parm = DECL_CHAIN (parm))
13694 if (DECL_HAS_VALUE_EXPR_P (parm))
13696 DECL_HAS_VALUE_EXPR_P (parm) = 0;
13697 DECL_IGNORED_P (parm) = 0;
/* Release the OpenMP context opened above, if any.  */
13701 if ((flag_openacc || flag_openmp || flag_openmp_simd)
13702 && gimplify_omp_ctxp)
13704 delete_omp_context (gimplify_omp_ctxp);
13705 gimplify_omp_ctxp = NULL;
13708 pop_gimplify_context (outer_bind);
13709 gcc_assert (gimplify_ctxp == NULL);
/* With checking enabled, validate the GIMPLE we produced, unless errors
   were already reported.  */
13711 if (flag_checking && !seen_error ())
13712 verify_gimple_in_seq (gimple_bind_body (outer_bind));
13714 timevar_pop (TV_TREE_GIMPLIFY);
13715 input_location = saved_location;
13717 return outer_bind;
13720 typedef char *char_p; /* For DEF_VEC_P. */
13722 /* Return whether we should exclude FNDECL from instrumentation. */
13724 static bool
13725 flag_instrument_functions_exclude_p (tree fndecl)
13727 vec<char_p> *v;
13729 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
13730 if (v && v->length () > 0)
13732 const char *name;
13733 int i;
13734 char *s;
13736 name = lang_hooks.decl_printable_name (fndecl, 0);
13737 FOR_EACH_VEC_ELT (*v, i, s)
13738 if (strstr (name, s) != NULL)
13739 return true;
13742 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
13743 if (v && v->length () > 0)
13745 const char *name;
13746 int i;
13747 char *s;
13749 name = DECL_SOURCE_FILE (fndecl);
13750 FOR_EACH_VEC_ELT (*v, i, s)
13751 if (strstr (name, s) != NULL)
13752 return true;
13755 return false;
13758 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
13759 node for the function we want to gimplify.
13761 Return the sequence of GIMPLE statements corresponding to the body
13762 of FNDECL. */
13764 void
13765 gimplify_function_tree (tree fndecl)
13767 tree parm, ret;
13768 gimple_seq seq;
13769 gbind *bind;
13771 gcc_assert (!gimple_body (fndecl));
13773 if (DECL_STRUCT_FUNCTION (fndecl))
13774 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
13775 else
13776 push_struct_function (fndecl);
13778 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
13779 if necessary. */
13780 cfun->curr_properties |= PROP_gimple_lva;
13782 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
13784 /* Preliminarily mark non-addressed complex variables as eligible
13785 for promotion to gimple registers. We'll transform their uses
13786 as we find them. */
13787 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
13788 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
13789 && !TREE_THIS_VOLATILE (parm)
13790 && !needs_to_live_in_memory (parm))
13791 DECL_GIMPLE_REG_P (parm) = 1;
/* Likewise mark a complex or vector return value as a candidate for
   promotion to a gimple register.  */
13794 ret = DECL_RESULT (fndecl);
13795 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
13796 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
13797 && !needs_to_live_in_memory (ret))
13798 DECL_GIMPLE_REG_P (ret) = 1;
/* Collect ASan use-after-scope poisoned variables while gimplifying
   the body; the set is only live across gimplify_body.  */
13800 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
13801 asan_poisoned_variables = new hash_set<tree> ();
13802 bind = gimplify_body (fndecl, true);
13803 if (asan_poisoned_variables)
13805 delete asan_poisoned_variables;
13806 asan_poisoned_variables = NULL;
13809 /* The tree body of the function is no longer needed, replace it
13810 with the new GIMPLE body. */
13811 seq = NULL;
13812 gimple_seq_add_stmt (&seq, bind);
13813 gimple_set_body (fndecl, seq);
13815 /* If we're instrumenting function entry/exit, then prepend the call to
13816 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
13817 catch the exit hook. */
13818 /* ??? Add some way to ignore exceptions for this TFE. */
13819 if (flag_instrument_function_entry_exit
13820 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
13821 /* Do not instrument extern inline functions. */
13822 && !(DECL_DECLARED_INLINE_P (fndecl)
13823 && DECL_EXTERNAL (fndecl)
13824 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
13825 && !flag_instrument_functions_exclude_p (fndecl))
13827 tree x;
13828 gbind *new_bind;
13829 gimple *tf;
13830 gimple_seq cleanup = NULL, body = NULL;
13831 tree tmp_var, this_fn_addr;
13832 gcall *call;
13834 /* The instrumentation hooks aren't going to call the instrumented
13835 function and the address they receive is expected to be matchable
13836 against symbol addresses. Make sure we don't create a trampoline,
13837 in case the current function is nested. */
13838 this_fn_addr = build_fold_addr_expr (current_function_decl);
13839 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
/* Exit hook: __builtin_return_address (0) feeds the second argument of
   __cyg_profile_func_exit, run as the try/finally cleanup.  */
13841 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
13842 call = gimple_build_call (x, 1, integer_zero_node);
13843 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13844 gimple_call_set_lhs (call, tmp_var);
13845 gimplify_seq_add_stmt (&cleanup, call);
13846 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
13847 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
13848 gimplify_seq_add_stmt (&cleanup, call);
13849 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
/* Entry hook: same pattern, prepended before the wrapped body.  */
13851 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
13852 call = gimple_build_call (x, 1, integer_zero_node);
13853 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13854 gimple_call_set_lhs (call, tmp_var);
13855 gimplify_seq_add_stmt (&body, call);
13856 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
13857 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
13858 gimplify_seq_add_stmt (&body, call);
13859 gimplify_seq_add_stmt (&body, tf);
13860 new_bind = gimple_build_bind (NULL, body, NULL);
13862 /* Replace the current function body with the body
13863 wrapped in the try/finally TF. */
13864 seq = NULL;
13865 gimple_seq_add_stmt (&seq, new_bind);
13866 gimple_set_body (fndecl, seq);
13867 bind = new_bind;
/* For -fsanitize=thread, wrap the whole body in a try/finally whose
   cleanup calls the internal TSAN function-exit hook.  */
13870 if (sanitize_flags_p (SANITIZE_THREAD))
13872 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
13873 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
13874 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
13875 /* Replace the current function body with the body
13876 wrapped in the try/finally TF. */
13877 seq = NULL;
13878 gimple_seq_add_stmt (&seq, new_bind);
13879 gimple_set_body (fndecl, seq);
13882 DECL_SAVED_TREE (fndecl) = NULL_TREE;
13883 cfun->curr_properties |= PROP_gimple_any;
13885 pop_cfun ();
13887 dump_function (TDI_gimple, fndecl);
13890 /* Return a dummy expression of type TYPE in order to keep going after an
13891 error. */
13893 static tree
13894 dummy_object (tree type)
13896 tree t = build_int_cst (build_pointer_type (type), 0);
13897 return build2 (MEM_REF, type, t, t);
13900 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
13901 builtin function, but a very special sort of operator. */
13903 enum gimplify_status
13904 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
13905 gimple_seq *post_p ATTRIBUTE_UNUSED)
13907 tree promoted_type, have_va_type;
13908 tree valist = TREE_OPERAND (*expr_p, 0);
13909 tree type = TREE_TYPE (*expr_p);
13910 tree t, tag, aptag;
13911 location_t loc = EXPR_LOCATION (*expr_p);
13913 /* Verify that valist is of the proper type. */
13914 have_va_type = TREE_TYPE (valist);
13915 if (have_va_type == error_mark_node)
13916 return GS_ERROR;
13917 have_va_type = targetm.canonical_va_list_type (have_va_type);
13918 if (have_va_type == NULL_TREE
13919 && POINTER_TYPE_P (TREE_TYPE (valist)))
13920 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
13921 have_va_type
13922 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
13923 gcc_assert (have_va_type != NULL_TREE);
13925 /* Generate a diagnostic for requesting data of a type that cannot
13926 be passed through `...' due to type promotion at the call site. */
13927 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
13928 != type)
13930 static bool gave_help;
13931 bool warned;
13932 /* Use the expansion point to handle cases such as passing bool (defined
13933 in a system header) through `...'. */
13934 location_t xloc
13935 = expansion_point_location_if_in_system_header (loc);
13937 /* Unfortunately, this is merely undefined, rather than a constraint
13938 violation, so we cannot make this an error. If this call is never
13939 executed, the program is still strictly conforming. */
13940 auto_diagnostic_group d;
13941 warned = warning_at (xloc, 0,
13942 "%qT is promoted to %qT when passed through %<...%>",
13943 type, promoted_type);
/* The extra hint is only printed once per compilation.  */
13944 if (!gave_help && warned)
13946 gave_help = true;
13947 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
13948 promoted_type, type);
13951 /* We can, however, treat "undefined" any way we please.
13952 Call abort to encourage the user to fix the program. */
13953 if (warned)
13954 inform (xloc, "if this code is reached, the program will abort");
13955 /* Before the abort, allow the evaluation of the va_list
13956 expression to exit or longjmp. */
13957 gimplify_and_add (valist, pre_p);
13958 t = build_call_expr_loc (loc,
13959 builtin_decl_implicit (BUILT_IN_TRAP), 0);
13960 gimplify_and_add (t, pre_p);
13962 /* This is dead code, but go ahead and finish so that the
13963 mode of the result comes out right. */
13964 *expr_p = dummy_object (type);
13965 return GS_ALL_DONE;
/* Lower to the internal IFN_VA_ARG call: TAG is a zero constant of
   pointer-to-TYPE encoding the requested type, APTAG a zero constant
   of the va_list expression's type.  */
13968 tag = build_int_cst (build_pointer_type (type), 0);
13969 aptag = build_int_cst (TREE_TYPE (valist), 0);
13971 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
13972 valist, tag, aptag);
13974 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
13975 needs to be expanded. */
13976 cfun->curr_properties &= ~PROP_gimple_lva;
13978 return GS_OK;
13981 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
13983 DST/SRC are the destination and source respectively. You can pass
13984 ungimplified trees in DST or SRC, in which case they will be
13985 converted to a gimple operand if necessary.
13987 This function returns the newly created GIMPLE_ASSIGN tuple. */
13989 gimple *
13990 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
13992 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
13993 gimplify_and_add (t, seq_p);
13994 ggc_free (t);
13995 return gimple_seq_last_stmt (*seq_p);
13998 inline hashval_t
13999 gimplify_hasher::hash (const elt_t *p)
14001 tree t = p->val;
14002 return iterative_hash_expr (t, 0);
14005 inline bool
14006 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
14008 tree t1 = p1->val;
14009 tree t2 = p2->val;
14010 enum tree_code code = TREE_CODE (t1);
14012 if (TREE_CODE (t2) != code
14013 || TREE_TYPE (t1) != TREE_TYPE (t2))
14014 return false;
14016 if (!operand_equal_p (t1, t2, 0))
14017 return false;
14019 /* Only allow them to compare equal if they also hash equal; otherwise
14020 results are nondeterminate, and we fail bootstrap comparison. */
14021 gcc_checking_assert (hash (p1) == hash (p2));
14023 return true;