Allow target to override gnu-user.h crti and crtn
[official-gcc.git] / gcc / gimplify.c
blob40fbaa2c523177031e50993b5575f821cf519227
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2018 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
69 /* Hash set of poisoned variables in a bind expr. */
70 static hash_set<tree> *asan_poisoned_variables = NULL;
72 enum gimplify_omp_var_data
74 GOVD_SEEN = 1,
75 GOVD_EXPLICIT = 2,
76 GOVD_SHARED = 4,
77 GOVD_PRIVATE = 8,
78 GOVD_FIRSTPRIVATE = 16,
79 GOVD_LASTPRIVATE = 32,
80 GOVD_REDUCTION = 64,
81 GOVD_LOCAL = 128,
82 GOVD_MAP = 256,
83 GOVD_DEBUG_PRIVATE = 512,
84 GOVD_PRIVATE_OUTER_REF = 1024,
85 GOVD_LINEAR = 2048,
86 GOVD_ALIGNED = 4096,
88 /* Flag for GOVD_MAP: don't copy back. */
89 GOVD_MAP_TO_ONLY = 8192,
91 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
92 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
94 GOVD_MAP_0LEN_ARRAY = 32768,
96 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
97 GOVD_MAP_ALWAYS_TO = 65536,
99 /* Flag for shared vars that are or might be stored to in the region. */
100 GOVD_WRITTEN = 131072,
102 /* Flag for GOVD_MAP, if it is a forced mapping. */
103 GOVD_MAP_FORCE = 262144,
105 /* Flag for GOVD_MAP: must be present already. */
106 GOVD_MAP_FORCE_PRESENT = 524288,
108 /* Flag for GOVD_MAP: only allocate. */
109 GOVD_MAP_ALLOC_ONLY = 1048576,
111 /* Flag for GOVD_MAP: only copy back. */
112 GOVD_MAP_FROM_ONLY = 2097152,
114 GOVD_NONTEMPORAL = 4194304,
116 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
117 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
118 | GOVD_LOCAL)
122 enum omp_region_type
124 ORT_WORKSHARE = 0x00,
125 ORT_TASKGROUP = 0x01,
126 ORT_SIMD = 0x04,
128 ORT_PARALLEL = 0x08,
129 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
131 ORT_TASK = 0x10,
132 ORT_UNTIED_TASK = ORT_TASK | 1,
133 ORT_TASKLOOP = ORT_TASK | 2,
134 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
136 ORT_TEAMS = 0x20,
137 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
138 ORT_HOST_TEAMS = ORT_TEAMS | 2,
139 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
141 /* Data region. */
142 ORT_TARGET_DATA = 0x40,
144 /* Data region with offloading. */
145 ORT_TARGET = 0x80,
146 ORT_COMBINED_TARGET = ORT_TARGET | 1,
148 /* OpenACC variants. */
149 ORT_ACC = 0x100, /* A generic OpenACC region. */
150 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
151 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
152 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
153 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
155 /* Dummy OpenMP region, used to disable expansion of
156 DECL_VALUE_EXPRs in taskloop pre body. */
157 ORT_NONE = 0x200
160 /* Gimplify hashtable helper. */
162 struct gimplify_hasher : free_ptr_hash <elt_t>
164 static inline hashval_t hash (const elt_t *);
165 static inline bool equal (const elt_t *, const elt_t *);
168 struct gimplify_ctx
170 struct gimplify_ctx *prev_context;
172 vec<gbind *> bind_expr_stack;
173 tree temps;
174 gimple_seq conditional_cleanups;
175 tree exit_label;
176 tree return_temp;
178 vec<tree> case_labels;
179 hash_set<tree> *live_switch_vars;
180 /* The formal temporary table. Should this be persistent? */
181 hash_table<gimplify_hasher> *temp_htab;
183 int conditions;
184 unsigned into_ssa : 1;
185 unsigned allow_rhs_cond_expr : 1;
186 unsigned in_cleanup_point_expr : 1;
187 unsigned keep_stack : 1;
188 unsigned save_stack : 1;
189 unsigned in_switch_expr : 1;
192 enum gimplify_defaultmap_kind
194 GDMK_SCALAR,
195 GDMK_AGGREGATE,
196 GDMK_ALLOCATABLE,
197 GDMK_POINTER
200 struct gimplify_omp_ctx
202 struct gimplify_omp_ctx *outer_context;
203 splay_tree variables;
204 hash_set<tree> *privatized_types;
205 /* Iteration variables in an OMP_FOR. */
206 vec<tree> loop_iter_var;
207 location_t location;
208 enum omp_clause_default_kind default_kind;
209 enum omp_region_type region_type;
210 bool combined_loop;
211 bool distribute;
212 bool target_firstprivatize_array_bases;
213 int defaultmap[4];
216 static struct gimplify_ctx *gimplify_ctxp;
217 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
219 /* Forward declaration. */
220 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
221 static hash_map<tree, tree> *oacc_declare_returns;
222 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
223 bool (*) (tree), fallback_t, bool);
225 /* Shorter alias name for the above function for use in gimplify.c
226 only. */
228 static inline void
229 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
231 gimple_seq_add_stmt_without_update (seq_p, gs);
234 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
235 NULL, a new sequence is allocated. This function is
236 similar to gimple_seq_add_seq, but does not scan the operands.
237 During gimplification, we need to manipulate statement sequences
238 before the def/use vectors have been constructed. */
240 static void
241 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
243 gimple_stmt_iterator si;
245 if (src == NULL)
246 return;
248 si = gsi_last (*dst_p);
249 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
253 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
254 and popping gimplify contexts. */
256 static struct gimplify_ctx *ctx_pool = NULL;
258 /* Return a gimplify context struct from the pool. */
260 static inline struct gimplify_ctx *
261 ctx_alloc (void)
263 struct gimplify_ctx * c = ctx_pool;
265 if (c)
266 ctx_pool = c->prev_context;
267 else
268 c = XNEW (struct gimplify_ctx);
270 memset (c, '\0', sizeof (*c));
271 return c;
274 /* Put gimplify context C back into the pool. */
276 static inline void
277 ctx_free (struct gimplify_ctx *c)
279 c->prev_context = ctx_pool;
280 ctx_pool = c;
283 /* Free allocated ctx stack memory. */
285 void
286 free_gimplify_stack (void)
288 struct gimplify_ctx *c;
290 while ((c = ctx_pool))
292 ctx_pool = c->prev_context;
293 free (c);
298 /* Set up a context for the gimplifier. */
300 void
301 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
303 struct gimplify_ctx *c = ctx_alloc ();
305 c->prev_context = gimplify_ctxp;
306 gimplify_ctxp = c;
307 gimplify_ctxp->into_ssa = in_ssa;
308 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
311 /* Tear down a context for the gimplifier. If BODY is non-null, then
312 put the temporaries into the outer BIND_EXPR. Otherwise, put them
313 in the local_decls.
315 BODY is not a sequence, but the first tuple in a sequence. */
317 void
318 pop_gimplify_context (gimple *body)
320 struct gimplify_ctx *c = gimplify_ctxp;
322 gcc_assert (c
323 && (!c->bind_expr_stack.exists ()
324 || c->bind_expr_stack.is_empty ()));
325 c->bind_expr_stack.release ();
326 gimplify_ctxp = c->prev_context;
328 if (body)
329 declare_vars (c->temps, body, false);
330 else
331 record_vars (c->temps);
333 delete c->temp_htab;
334 c->temp_htab = NULL;
335 ctx_free (c);
338 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
340 static void
341 gimple_push_bind_expr (gbind *bind_stmt)
343 gimplify_ctxp->bind_expr_stack.reserve (8);
344 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
347 /* Pop the first element off the stack of bindings. */
349 static void
350 gimple_pop_bind_expr (void)
352 gimplify_ctxp->bind_expr_stack.pop ();
355 /* Return the first element of the stack of bindings. */
357 gbind *
358 gimple_current_bind_expr (void)
360 return gimplify_ctxp->bind_expr_stack.last ();
363 /* Return the stack of bindings created during gimplification. */
365 vec<gbind *>
366 gimple_bind_expr_stack (void)
368 return gimplify_ctxp->bind_expr_stack;
371 /* Return true iff there is a COND_EXPR between us and the innermost
372 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
374 static bool
375 gimple_conditional_context (void)
377 return gimplify_ctxp->conditions > 0;
380 /* Note that we've entered a COND_EXPR. */
382 static void
383 gimple_push_condition (void)
385 #ifdef ENABLE_GIMPLE_CHECKING
386 if (gimplify_ctxp->conditions == 0)
387 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
388 #endif
389 ++(gimplify_ctxp->conditions);
392 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
393 now, add any conditional cleanups we've seen to the prequeue. */
395 static void
396 gimple_pop_condition (gimple_seq *pre_p)
398 int conds = --(gimplify_ctxp->conditions);
400 gcc_assert (conds >= 0);
401 if (conds == 0)
403 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
404 gimplify_ctxp->conditional_cleanups = NULL;
408 /* A stable comparison routine for use with splay trees and DECLs. */
410 static int
411 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
413 tree a = (tree) xa;
414 tree b = (tree) xb;
416 return DECL_UID (a) - DECL_UID (b);
419 /* Create a new omp construct that deals with variable remapping. */
421 static struct gimplify_omp_ctx *
422 new_omp_context (enum omp_region_type region_type)
424 struct gimplify_omp_ctx *c;
426 c = XCNEW (struct gimplify_omp_ctx);
427 c->outer_context = gimplify_omp_ctxp;
428 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
429 c->privatized_types = new hash_set<tree>;
430 c->location = input_location;
431 c->region_type = region_type;
432 if ((region_type & ORT_TASK) == 0)
433 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
434 else
435 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
436 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
437 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
438 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
439 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
441 return c;
444 /* Destroy an omp construct that deals with variable remapping. */
446 static void
447 delete_omp_context (struct gimplify_omp_ctx *c)
449 splay_tree_delete (c->variables);
450 delete c->privatized_types;
451 c->loop_iter_var.release ();
452 XDELETE (c);
455 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
456 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
458 /* Both gimplify the statement T and append it to *SEQ_P. This function
459 behaves exactly as gimplify_stmt, but you don't have to pass T as a
460 reference. */
462 void
463 gimplify_and_add (tree t, gimple_seq *seq_p)
465 gimplify_stmt (&t, seq_p);
468 /* Gimplify statement T into sequence *SEQ_P, and return the first
469 tuple in the sequence of generated tuples for this statement.
470 Return NULL if gimplifying T produced no tuples. */
472 static gimple *
473 gimplify_and_return_first (tree t, gimple_seq *seq_p)
475 gimple_stmt_iterator last = gsi_last (*seq_p);
477 gimplify_and_add (t, seq_p);
479 if (!gsi_end_p (last))
481 gsi_next (&last);
482 return gsi_stmt (last);
484 else
485 return gimple_seq_first_stmt (*seq_p);
488 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
489 LHS, or for a call argument. */
491 static bool
492 is_gimple_mem_rhs (tree t)
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return is_gimple_val (t) || is_gimple_lvalue (t);
502 /* Return true if T is a CALL_EXPR or an expression that can be
503 assigned to a temporary. Note that this predicate should only be
504 used during gimplification. See the rationale for this in
505 gimplify_modify_expr. */
507 static bool
508 is_gimple_reg_rhs_or_call (tree t)
510 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
511 || TREE_CODE (t) == CALL_EXPR);
514 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
515 this predicate should only be used during gimplification. See the
516 rationale for this in gimplify_modify_expr. */
518 static bool
519 is_gimple_mem_rhs_or_call (tree t)
521 /* If we're dealing with a renamable type, either source or dest must be
522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t)))
524 return is_gimple_val (t);
525 else
526 return (is_gimple_val (t)
527 || is_gimple_lvalue (t)
528 || TREE_CLOBBER_P (t)
529 || TREE_CODE (t) == CALL_EXPR);
532 /* Create a temporary with a name derived from VAL. Subroutine of
533 lookup_tmp_var; nobody else should call this function. */
535 static inline tree
536 create_tmp_from_val (tree val)
538 /* Drop all qualifiers and address-space information from the value type. */
539 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
540 tree var = create_tmp_var (type, get_name (val));
541 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
542 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
543 DECL_GIMPLE_REG_P (var) = 1;
544 return var;
547 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
548 an existing expression temporary. */
550 static tree
551 lookup_tmp_var (tree val, bool is_formal)
553 tree ret;
555 /* If not optimizing, never really reuse a temporary. local-alloc
556 won't allocate any variable that is used in more than one basic
557 block, which means it will go into memory, causing much extra
558 work in reload and final and poorer code generation, outweighing
559 the extra memory allocation here. */
560 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
561 ret = create_tmp_from_val (val);
562 else
564 elt_t elt, *elt_p;
565 elt_t **slot;
567 elt.val = val;
568 if (!gimplify_ctxp->temp_htab)
569 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
570 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
571 if (*slot == NULL)
573 elt_p = XNEW (elt_t);
574 elt_p->val = val;
575 elt_p->temp = ret = create_tmp_from_val (val);
576 *slot = elt_p;
578 else
580 elt_p = *slot;
581 ret = elt_p->temp;
585 return ret;
588 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
590 static tree
591 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
592 bool is_formal, bool allow_ssa)
594 tree t, mod;
596 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
597 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
598 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
599 fb_rvalue);
601 if (allow_ssa
602 && gimplify_ctxp->into_ssa
603 && is_gimple_reg_type (TREE_TYPE (val)))
605 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
606 if (! gimple_in_ssa_p (cfun))
608 const char *name = get_name (val);
609 if (name)
610 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
613 else
614 t = lookup_tmp_var (val, is_formal);
616 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
618 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622 ggc_free (mod);
624 return t;
627 /* Return a formal temporary variable initialized with VAL. PRE_P is as
628 in gimplify_expr. Only use this function if:
630 1) The value of the unfactored expression represented by VAL will not
631 change between the initialization and use of the temporary, and
632 2) The temporary will not be otherwise modified.
634 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
635 and #2 means it is inappropriate for && temps.
637 For other cases, use get_initialized_tmp_var instead. */
639 tree
640 get_formal_tmp_var (tree val, gimple_seq *pre_p)
642 return internal_get_tmp_var (val, pre_p, NULL, true, true);
645 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
646 are as in gimplify_expr. */
648 tree
649 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
650 bool allow_ssa)
652 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
655 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
656 generate debug info for them; otherwise don't. */
658 void
659 declare_vars (tree vars, gimple *gs, bool debug_info)
661 tree last = vars;
662 if (last)
664 tree temps, block;
666 gbind *scope = as_a <gbind *> (gs);
668 temps = nreverse (last);
670 block = gimple_bind_block (scope);
671 gcc_assert (!block || TREE_CODE (block) == BLOCK);
672 if (!block || !debug_info)
674 DECL_CHAIN (last) = gimple_bind_vars (scope);
675 gimple_bind_set_vars (scope, temps);
677 else
679 /* We need to attach the nodes both to the BIND_EXPR and to its
680 associated BLOCK for debugging purposes. The key point here
681 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
682 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
683 if (BLOCK_VARS (block))
684 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
685 else
687 gimple_bind_set_vars (scope,
688 chainon (gimple_bind_vars (scope), temps));
689 BLOCK_VARS (block) = temps;
695 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
696 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
697 no such upper bound can be obtained. */
699 static void
700 force_constant_size (tree var)
702 /* The only attempt we make is by querying the maximum size of objects
703 of the variable's type. */
705 HOST_WIDE_INT max_size;
707 gcc_assert (VAR_P (var));
709 max_size = max_int_size_in_bytes (TREE_TYPE (var));
711 gcc_assert (max_size >= 0);
713 DECL_SIZE_UNIT (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
715 DECL_SIZE (var)
716 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
719 /* Push the temporary variable TMP into the current binding. */
721 void
722 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
724 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
726 /* Later processing assumes that the object size is constant, which might
727 not be true at this point. Force the use of a constant upper bound in
728 this case. */
729 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
730 force_constant_size (tmp);
732 DECL_CONTEXT (tmp) = fn->decl;
733 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
735 record_vars_into (tmp, fn->decl);
738 /* Push the temporary variable TMP into the current binding. */
740 void
741 gimple_add_tmp_var (tree tmp)
743 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
749 force_constant_size (tmp);
751 DECL_CONTEXT (tmp) = current_function_decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
754 if (gimplify_ctxp)
756 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
757 gimplify_ctxp->temps = tmp;
759 /* Mark temporaries local within the nearest enclosing parallel. */
760 if (gimplify_omp_ctxp)
762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
763 while (ctx
764 && (ctx->region_type == ORT_WORKSHARE
765 || ctx->region_type == ORT_TASKGROUP
766 || ctx->region_type == ORT_SIMD
767 || ctx->region_type == ORT_ACC))
768 ctx = ctx->outer_context;
769 if (ctx)
770 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
773 else if (cfun)
774 record_vars (tmp);
775 else
777 gimple_seq body_seq;
779 /* This case is for nested functions. We need to expose the locals
780 they create. */
781 body_seq = gimple_body (current_function_decl);
782 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
788 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
789 nodes that are referenced more than once in GENERIC functions. This is
790 necessary because gimplification (translation into GIMPLE) is performed
791 by modifying tree nodes in-place, so gimplication of a shared node in a
792 first context could generate an invalid GIMPLE form in a second context.
794 This is achieved with a simple mark/copy/unmark algorithm that walks the
795 GENERIC representation top-down, marks nodes with TREE_VISITED the first
796 time it encounters them, duplicates them if they already have TREE_VISITED
797 set, and finally removes the TREE_VISITED marks it has set.
799 The algorithm works only at the function level, i.e. it generates a GENERIC
800 representation of a function with no nodes shared within the function when
801 passed a GENERIC function (except for nodes that are allowed to be shared).
803 At the global level, it is also necessary to unshare tree nodes that are
804 referenced in more than one function, for the same aforementioned reason.
805 This requires some cooperation from the front-end. There are 2 strategies:
807 1. Manual unsharing. The front-end needs to call unshare_expr on every
808 expression that might end up being shared across functions.
810 2. Deep unsharing. This is an extension of regular unsharing. Instead
811 of calling unshare_expr on expressions that might be shared across
812 functions, the front-end pre-marks them with TREE_VISITED. This will
813 ensure that they are unshared on the first reference within functions
814 when the regular unsharing algorithm runs. The counterpart is that
815 this algorithm must look deeper than for manual unsharing, which is
816 specified by LANG_HOOKS_DEEP_UNSHARING.
818 If there are only few specific cases of node sharing across functions, it is
819 probably easier for a front-end to unshare the expressions manually. On the
820 contrary, if the expressions generated at the global level are as widespread
821 as expressions generated within functions, deep unsharing is very likely the
822 way to go. */
824 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
825 These nodes model computations that must be done once. If we were to
826 unshare something like SAVE_EXPR(i++), the gimplification process would
827 create wrong code. However, if DATA is non-null, it must hold a pointer
828 set that is used to unshare the subtrees of these nodes. */
830 static tree
831 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
833 tree t = *tp;
834 enum tree_code code = TREE_CODE (t);
836 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
837 copy their subtrees if we can make sure to do it only once. */
838 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
840 if (data && !((hash_set<tree> *)data)->add (t))
842 else
843 *walk_subtrees = 0;
846 /* Stop at types, decls, constants like copy_tree_r. */
847 else if (TREE_CODE_CLASS (code) == tcc_type
848 || TREE_CODE_CLASS (code) == tcc_declaration
849 || TREE_CODE_CLASS (code) == tcc_constant)
850 *walk_subtrees = 0;
852 /* Cope with the statement expression extension. */
853 else if (code == STATEMENT_LIST)
856 /* Leave the bulk of the work to copy_tree_r itself. */
857 else
858 copy_tree_r (tp, walk_subtrees, NULL);
860 return NULL_TREE;
863 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
864 If *TP has been visited already, then *TP is deeply copied by calling
865 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
867 static tree
868 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
870 tree t = *tp;
871 enum tree_code code = TREE_CODE (t);
873 /* Skip types, decls, and constants. But we do want to look at their
874 types and the bounds of types. Mark them as visited so we properly
875 unmark their subtrees on the unmark pass. If we've already seen them,
876 don't look down further. */
877 if (TREE_CODE_CLASS (code) == tcc_type
878 || TREE_CODE_CLASS (code) == tcc_declaration
879 || TREE_CODE_CLASS (code) == tcc_constant)
881 if (TREE_VISITED (t))
882 *walk_subtrees = 0;
883 else
884 TREE_VISITED (t) = 1;
887 /* If this node has been visited already, unshare it and don't look
888 any deeper. */
889 else if (TREE_VISITED (t))
891 walk_tree (tp, mostly_copy_tree_r, data, NULL);
892 *walk_subtrees = 0;
895 /* Otherwise, mark the node as visited and keep looking. */
896 else
897 TREE_VISITED (t) = 1;
899 return NULL_TREE;
902 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
903 copy_if_shared_r callback unmodified. */
905 static inline void
906 copy_if_shared (tree *tp, void *data)
908 walk_tree (tp, copy_if_shared_r, data, NULL);
911 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
912 any nested functions. */
914 static void
915 unshare_body (tree fndecl)
917 struct cgraph_node *cgn = cgraph_node::get (fndecl);
918 /* If the language requires deep unsharing, we need a pointer set to make
919 sure we don't repeatedly unshare subtrees of unshareable nodes. */
920 hash_set<tree> *visited
921 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
923 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
924 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
925 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
927 delete visited;
929 if (cgn)
930 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
931 unshare_body (cgn->decl);
934 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
935 Subtrees are walked until the first unvisited node is encountered. */
937 static tree
938 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
940 tree t = *tp;
942 /* If this node has been visited, unmark it and keep looking. */
943 if (TREE_VISITED (t))
944 TREE_VISITED (t) = 0;
946 /* Otherwise, don't look any deeper. */
947 else
948 *walk_subtrees = 0;
950 return NULL_TREE;
953 /* Unmark the visited trees rooted at *TP. */
955 static inline void
956 unmark_visited (tree *tp)
958 walk_tree (tp, unmark_visited_r, NULL, NULL);
961 /* Likewise, but mark all trees as not visited. */
963 static void
964 unvisit_body (tree fndecl)
966 struct cgraph_node *cgn = cgraph_node::get (fndecl);
968 unmark_visited (&DECL_SAVED_TREE (fndecl));
969 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
970 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
972 if (cgn)
973 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
974 unvisit_body (cgn->decl);
977 /* Unconditionally make an unshared copy of EXPR. This is used when using
978 stored expressions which span multiple functions, such as BINFO_VTABLE,
979 as the normal unsharing process can't tell that they're shared. */
981 tree
982 unshare_expr (tree expr)
984 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
985 return expr;
988 /* Worker for unshare_expr_without_location. */
990 static tree
991 prune_expr_location (tree *tp, int *walk_subtrees, void *)
993 if (EXPR_P (*tp))
994 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
995 else
996 *walk_subtrees = 0;
997 return NULL_TREE;
1000 /* Similar to unshare_expr but also prune all expression locations
1001 from EXPR. */
1003 tree
1004 unshare_expr_without_location (tree expr)
1006 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1007 if (EXPR_P (expr))
1008 walk_tree (&expr, prune_expr_location, NULL, NULL);
1009 return expr;
1012 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1013 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1014 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1015 EXPR is the location of the EXPR. */
1017 static location_t
1018 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1020 if (!expr)
1021 return or_else;
1023 if (EXPR_HAS_LOCATION (expr))
1024 return EXPR_LOCATION (expr);
1026 if (TREE_CODE (expr) != STATEMENT_LIST)
1027 return or_else;
1029 tree_stmt_iterator i = tsi_start (expr);
1031 bool found = false;
1032 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1034 found = true;
1035 tsi_next (&i);
1038 if (!found || !tsi_one_before_end_p (i))
1039 return or_else;
1041 return rexpr_location (tsi_stmt (i), or_else);
1044 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1045 rexpr_location for the potential recursion. */
1047 static inline bool
1048 rexpr_has_location (tree expr)
1050 return rexpr_location (expr) != UNKNOWN_LOCATION;
1054 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1055 contain statements and have a value. Assign its value to a temporary
1056 and give it void_type_node. Return the temporary, or NULL_TREE if
1057 WRAPPER was already void. */
1059 tree
1060 voidify_wrapper_expr (tree wrapper, tree temp)
1062 tree type = TREE_TYPE (wrapper);
1063 if (type && !VOID_TYPE_P (type))
1065 tree *p;
1067 /* Set p to point to the body of the wrapper. Loop until we find
1068 something that isn't a wrapper. */
1069 for (p = &wrapper; p && *p; )
1071 switch (TREE_CODE (*p))
1073 case BIND_EXPR:
1074 TREE_SIDE_EFFECTS (*p) = 1;
1075 TREE_TYPE (*p) = void_type_node;
1076 /* For a BIND_EXPR, the body is operand 1. */
1077 p = &BIND_EXPR_BODY (*p);
1078 break;
1080 case CLEANUP_POINT_EXPR:
1081 case TRY_FINALLY_EXPR:
1082 case TRY_CATCH_EXPR:
1083 TREE_SIDE_EFFECTS (*p) = 1;
1084 TREE_TYPE (*p) = void_type_node;
1085 p = &TREE_OPERAND (*p, 0);
1086 break;
1088 case STATEMENT_LIST:
1090 tree_stmt_iterator i = tsi_last (*p);
1091 TREE_SIDE_EFFECTS (*p) = 1;
1092 TREE_TYPE (*p) = void_type_node;
1093 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1095 break;
1097 case COMPOUND_EXPR:
1098 /* Advance to the last statement. Set all container types to
1099 void. */
1100 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1102 TREE_SIDE_EFFECTS (*p) = 1;
1103 TREE_TYPE (*p) = void_type_node;
1105 break;
1107 case TRANSACTION_EXPR:
1108 TREE_SIDE_EFFECTS (*p) = 1;
1109 TREE_TYPE (*p) = void_type_node;
1110 p = &TRANSACTION_EXPR_BODY (*p);
1111 break;
1113 default:
1114 /* Assume that any tree upon which voidify_wrapper_expr is
1115 directly called is a wrapper, and that its body is op0. */
1116 if (p == &wrapper)
1118 TREE_SIDE_EFFECTS (*p) = 1;
1119 TREE_TYPE (*p) = void_type_node;
1120 p = &TREE_OPERAND (*p, 0);
1121 break;
1123 goto out;
1127 out:
1128 if (p == NULL || IS_EMPTY_STMT (*p))
1129 temp = NULL_TREE;
1130 else if (temp)
1132 /* The wrapper is on the RHS of an assignment that we're pushing
1133 down. */
1134 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1135 || TREE_CODE (temp) == MODIFY_EXPR);
1136 TREE_OPERAND (temp, 1) = *p;
1137 *p = temp;
1139 else
1141 temp = create_tmp_var (type, "retval");
1142 *p = build2 (INIT_EXPR, type, temp, *p);
1145 return temp;
1148 return NULL_TREE;
1151 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1152 a temporary through which they communicate. */
1154 static void
1155 build_stack_save_restore (gcall **save, gcall **restore)
1157 tree tmp_var;
1159 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1160 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1161 gimple_call_set_lhs (*save, tmp_var);
1163 *restore
1164 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1165 1, tmp_var);
1168 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1170 static tree
1171 build_asan_poison_call_expr (tree decl)
1173 /* Do not poison variables that have size equal to zero. */
1174 tree unit_size = DECL_SIZE_UNIT (decl);
1175 if (zerop (unit_size))
1176 return NULL_TREE;
1178 tree base = build_fold_addr_expr (decl);
1180 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1181 void_type_node, 3,
1182 build_int_cst (integer_type_node,
1183 ASAN_MARK_POISON),
1184 base, unit_size);
1187 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1188 on POISON flag, shadow memory of a DECL variable. The call will be
1189 put on location identified by IT iterator, where BEFORE flag drives
1190 position where the stmt will be put. */
1192 static void
1193 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1194 bool before)
1196 tree unit_size = DECL_SIZE_UNIT (decl);
1197 tree base = build_fold_addr_expr (decl);
1199 /* Do not poison variables that have size equal to zero. */
1200 if (zerop (unit_size))
1201 return;
1203 /* It's necessary to have all stack variables aligned to ASAN granularity
1204 bytes. */
1205 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1206 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1208 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1210 gimple *g
1211 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1212 build_int_cst (integer_type_node, flags),
1213 base, unit_size);
1215 if (before)
1216 gsi_insert_before (it, g, GSI_NEW_STMT);
1217 else
1218 gsi_insert_after (it, g, GSI_NEW_STMT);
1221 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1222 either poisons or unpoisons a DECL. Created statement is appended
1223 to SEQ_P gimple sequence. */
1225 static void
1226 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1228 gimple_stmt_iterator it = gsi_last (*seq_p);
1229 bool before = false;
1231 if (gsi_end_p (it))
1232 before = true;
1234 asan_poison_variable (decl, poison, &it, before);
1237 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1239 static int
1240 sort_by_decl_uid (const void *a, const void *b)
1242 const tree *t1 = (const tree *)a;
1243 const tree *t2 = (const tree *)b;
1245 int uid1 = DECL_UID (*t1);
1246 int uid2 = DECL_UID (*t2);
1248 if (uid1 < uid2)
1249 return -1;
1250 else if (uid1 > uid2)
1251 return 1;
1252 else
1253 return 0;
1256 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1257 depending on POISON flag. Created statement is appended
1258 to SEQ_P gimple sequence. */
1260 static void
1261 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1263 unsigned c = variables->elements ();
1264 if (c == 0)
1265 return;
1267 auto_vec<tree> sorted_variables (c);
1269 for (hash_set<tree>::iterator it = variables->begin ();
1270 it != variables->end (); ++it)
1271 sorted_variables.safe_push (*it);
1273 sorted_variables.qsort (sort_by_decl_uid);
1275 unsigned i;
1276 tree var;
1277 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1279 asan_poison_variable (var, poison, seq_p);
1281 /* Add use_after_scope_memory attribute for the variable in order
1282 to prevent re-written into SSA. */
1283 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1284 DECL_ATTRIBUTES (var)))
1285 DECL_ATTRIBUTES (var)
1286 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1287 integer_one_node,
1288 DECL_ATTRIBUTES (var));
1292 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1294 static enum gimplify_status
1295 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1297 tree bind_expr = *expr_p;
1298 bool old_keep_stack = gimplify_ctxp->keep_stack;
1299 bool old_save_stack = gimplify_ctxp->save_stack;
1300 tree t;
1301 gbind *bind_stmt;
1302 gimple_seq body, cleanup;
1303 gcall *stack_save;
1304 location_t start_locus = 0, end_locus = 0;
1305 tree ret_clauses = NULL;
1307 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1309 /* Mark variables seen in this bind expr. */
1310 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1312 if (VAR_P (t))
1314 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1316 /* Mark variable as local. */
1317 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1318 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1319 || splay_tree_lookup (ctx->variables,
1320 (splay_tree_key) t) == NULL))
1322 if (ctx->region_type == ORT_SIMD
1323 && TREE_ADDRESSABLE (t)
1324 && !TREE_STATIC (t))
1325 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1326 else
1327 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1330 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1332 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1333 cfun->has_local_explicit_reg_vars = true;
1336 /* Preliminarily mark non-addressed complex variables as eligible
1337 for promotion to gimple registers. We'll transform their uses
1338 as we find them. */
1339 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1340 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1341 && !TREE_THIS_VOLATILE (t)
1342 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1343 && !needs_to_live_in_memory (t))
1344 DECL_GIMPLE_REG_P (t) = 1;
1347 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1348 BIND_EXPR_BLOCK (bind_expr));
1349 gimple_push_bind_expr (bind_stmt);
1351 gimplify_ctxp->keep_stack = false;
1352 gimplify_ctxp->save_stack = false;
1354 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1355 body = NULL;
1356 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1357 gimple_bind_set_body (bind_stmt, body);
1359 /* Source location wise, the cleanup code (stack_restore and clobbers)
1360 belongs to the end of the block, so propagate what we have. The
1361 stack_save operation belongs to the beginning of block, which we can
1362 infer from the bind_expr directly if the block has no explicit
1363 assignment. */
1364 if (BIND_EXPR_BLOCK (bind_expr))
1366 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1367 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1369 if (start_locus == 0)
1370 start_locus = EXPR_LOCATION (bind_expr);
1372 cleanup = NULL;
1373 stack_save = NULL;
1375 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1376 the stack space allocated to the VLAs. */
1377 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1379 gcall *stack_restore;
1381 /* Save stack on entry and restore it on exit. Add a try_finally
1382 block to achieve this. */
1383 build_stack_save_restore (&stack_save, &stack_restore);
1385 gimple_set_location (stack_save, start_locus);
1386 gimple_set_location (stack_restore, end_locus);
1388 gimplify_seq_add_stmt (&cleanup, stack_restore);
1391 /* Add clobbers for all variables that go out of scope. */
1392 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1394 if (VAR_P (t)
1395 && !is_global_var (t)
1396 && DECL_CONTEXT (t) == current_function_decl)
1398 if (!DECL_HARD_REGISTER (t)
1399 && !TREE_THIS_VOLATILE (t)
1400 && !DECL_HAS_VALUE_EXPR_P (t)
1401 /* Only care for variables that have to be in memory. Others
1402 will be rewritten into SSA names, hence moved to the
1403 top-level. */
1404 && !is_gimple_reg (t)
1405 && flag_stack_reuse != SR_NONE)
1407 tree clobber = build_clobber (TREE_TYPE (t));
1408 gimple *clobber_stmt;
1409 clobber_stmt = gimple_build_assign (t, clobber);
1410 gimple_set_location (clobber_stmt, end_locus);
1411 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1414 if (flag_openacc && oacc_declare_returns != NULL)
1416 tree *c = oacc_declare_returns->get (t);
1417 if (c != NULL)
1419 if (ret_clauses)
1420 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1422 ret_clauses = *c;
1424 oacc_declare_returns->remove (t);
1426 if (oacc_declare_returns->elements () == 0)
1428 delete oacc_declare_returns;
1429 oacc_declare_returns = NULL;
1435 if (asan_poisoned_variables != NULL
1436 && asan_poisoned_variables->contains (t))
1438 asan_poisoned_variables->remove (t);
1439 asan_poison_variable (t, true, &cleanup);
1442 if (gimplify_ctxp->live_switch_vars != NULL
1443 && gimplify_ctxp->live_switch_vars->contains (t))
1444 gimplify_ctxp->live_switch_vars->remove (t);
1447 if (ret_clauses)
1449 gomp_target *stmt;
1450 gimple_stmt_iterator si = gsi_start (cleanup);
1452 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1453 ret_clauses);
1454 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1457 if (cleanup)
1459 gtry *gs;
1460 gimple_seq new_body;
1462 new_body = NULL;
1463 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1464 GIMPLE_TRY_FINALLY);
1466 if (stack_save)
1467 gimplify_seq_add_stmt (&new_body, stack_save);
1468 gimplify_seq_add_stmt (&new_body, gs);
1469 gimple_bind_set_body (bind_stmt, new_body);
1472 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1473 if (!gimplify_ctxp->keep_stack)
1474 gimplify_ctxp->keep_stack = old_keep_stack;
1475 gimplify_ctxp->save_stack = old_save_stack;
1477 gimple_pop_bind_expr ();
1479 gimplify_seq_add_stmt (pre_p, bind_stmt);
1481 if (temp)
1483 *expr_p = temp;
1484 return GS_OK;
1487 *expr_p = NULL_TREE;
1488 return GS_ALL_DONE;
1491 /* Maybe add early return predict statement to PRE_P sequence. */
1493 static void
1494 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1496 /* If we are not in a conditional context, add PREDICT statement. */
1497 if (gimple_conditional_context ())
1499 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1500 NOT_TAKEN);
1501 gimplify_seq_add_stmt (pre_p, predict);
1505 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1506 GIMPLE value, it is assigned to a new temporary and the statement is
1507 re-written to return the temporary.
1509 PRE_P points to the sequence where side effects that must happen before
1510 STMT should be stored. */
1512 static enum gimplify_status
1513 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1515 greturn *ret;
1516 tree ret_expr = TREE_OPERAND (stmt, 0);
1517 tree result_decl, result;
1519 if (ret_expr == error_mark_node)
1520 return GS_ERROR;
1522 if (!ret_expr
1523 || TREE_CODE (ret_expr) == RESULT_DECL)
1525 maybe_add_early_return_predict_stmt (pre_p);
1526 greturn *ret = gimple_build_return (ret_expr);
1527 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1528 gimplify_seq_add_stmt (pre_p, ret);
1529 return GS_ALL_DONE;
1532 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1533 result_decl = NULL_TREE;
1534 else
1536 result_decl = TREE_OPERAND (ret_expr, 0);
1538 /* See through a return by reference. */
1539 if (TREE_CODE (result_decl) == INDIRECT_REF)
1540 result_decl = TREE_OPERAND (result_decl, 0);
1542 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1543 || TREE_CODE (ret_expr) == INIT_EXPR)
1544 && TREE_CODE (result_decl) == RESULT_DECL);
1547 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1548 Recall that aggregate_value_p is FALSE for any aggregate type that is
1549 returned in registers. If we're returning values in registers, then
1550 we don't want to extend the lifetime of the RESULT_DECL, particularly
1551 across another call. In addition, for those aggregates for which
1552 hard_function_value generates a PARALLEL, we'll die during normal
1553 expansion of structure assignments; there's special code in expand_return
1554 to handle this case that does not exist in expand_expr. */
1555 if (!result_decl)
1556 result = NULL_TREE;
1557 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1559 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1561 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1562 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1563 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1564 should be effectively allocated by the caller, i.e. all calls to
1565 this function must be subject to the Return Slot Optimization. */
1566 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1567 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1569 result = result_decl;
1571 else if (gimplify_ctxp->return_temp)
1572 result = gimplify_ctxp->return_temp;
1573 else
1575 result = create_tmp_reg (TREE_TYPE (result_decl));
1577 /* ??? With complex control flow (usually involving abnormal edges),
1578 we can wind up warning about an uninitialized value for this. Due
1579 to how this variable is constructed and initialized, this is never
1580 true. Give up and never warn. */
1581 TREE_NO_WARNING (result) = 1;
1583 gimplify_ctxp->return_temp = result;
1586 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1587 Then gimplify the whole thing. */
1588 if (result != result_decl)
1589 TREE_OPERAND (ret_expr, 0) = result;
1591 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1593 maybe_add_early_return_predict_stmt (pre_p);
1594 ret = gimple_build_return (result);
1595 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1596 gimplify_seq_add_stmt (pre_p, ret);
1598 return GS_ALL_DONE;
1601 /* Gimplify a variable-length array DECL. */
1603 static void
1604 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1606 /* This is a variable-sized decl. Simplify its size and mark it
1607 for deferred expansion. */
1608 tree t, addr, ptr_type;
1610 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1611 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1613 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1614 if (DECL_HAS_VALUE_EXPR_P (decl))
1615 return;
1617 /* All occurrences of this decl in final gimplified code will be
1618 replaced by indirection. Setting DECL_VALUE_EXPR does two
1619 things: First, it lets the rest of the gimplifier know what
1620 replacement to use. Second, it lets the debug info know
1621 where to find the value. */
1622 ptr_type = build_pointer_type (TREE_TYPE (decl));
1623 addr = create_tmp_var (ptr_type, get_name (decl));
1624 DECL_IGNORED_P (addr) = 0;
1625 t = build_fold_indirect_ref (addr);
1626 TREE_THIS_NOTRAP (t) = 1;
1627 SET_DECL_VALUE_EXPR (decl, t);
1628 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1630 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1631 max_int_size_in_bytes (TREE_TYPE (decl)));
1632 /* The call has been built for a variable-sized object. */
1633 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1634 t = fold_convert (ptr_type, t);
1635 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1637 gimplify_and_add (t, seq_p);
1640 /* A helper function to be called via walk_tree. Mark all labels under *TP
1641 as being forced. To be called for DECL_INITIAL of static variables. */
1643 static tree
1644 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1646 if (TYPE_P (*tp))
1647 *walk_subtrees = 0;
1648 if (TREE_CODE (*tp) == LABEL_DECL)
1650 FORCED_LABEL (*tp) = 1;
1651 cfun->has_forced_label_in_static = 1;
1654 return NULL_TREE;
1657 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1658 and initialization explicit. */
1660 static enum gimplify_status
1661 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1663 tree stmt = *stmt_p;
1664 tree decl = DECL_EXPR_DECL (stmt);
1666 *stmt_p = NULL_TREE;
1668 if (TREE_TYPE (decl) == error_mark_node)
1669 return GS_ERROR;
1671 if ((TREE_CODE (decl) == TYPE_DECL
1672 || VAR_P (decl))
1673 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1675 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1676 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1677 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1680 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1681 in case its size expressions contain problematic nodes like CALL_EXPR. */
1682 if (TREE_CODE (decl) == TYPE_DECL
1683 && DECL_ORIGINAL_TYPE (decl)
1684 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1686 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1687 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1688 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1691 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1693 tree init = DECL_INITIAL (decl);
1694 bool is_vla = false;
1696 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1697 || (!TREE_STATIC (decl)
1698 && flag_stack_check == GENERIC_STACK_CHECK
1699 && compare_tree_int (DECL_SIZE_UNIT (decl),
1700 STACK_CHECK_MAX_VAR_SIZE) > 0))
1702 gimplify_vla_decl (decl, seq_p);
1703 is_vla = true;
1706 if (asan_poisoned_variables
1707 && !is_vla
1708 && TREE_ADDRESSABLE (decl)
1709 && !TREE_STATIC (decl)
1710 && !DECL_HAS_VALUE_EXPR_P (decl)
1711 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1712 && dbg_cnt (asan_use_after_scope)
1713 && !gimplify_omp_ctxp)
1715 asan_poisoned_variables->add (decl);
1716 asan_poison_variable (decl, false, seq_p);
1717 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1718 gimplify_ctxp->live_switch_vars->add (decl);
1721 /* Some front ends do not explicitly declare all anonymous
1722 artificial variables. We compensate here by declaring the
1723 variables, though it would be better if the front ends would
1724 explicitly declare them. */
1725 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1726 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1727 gimple_add_tmp_var (decl);
1729 if (init && init != error_mark_node)
1731 if (!TREE_STATIC (decl))
1733 DECL_INITIAL (decl) = NULL_TREE;
1734 init = build2 (INIT_EXPR, void_type_node, decl, init);
1735 gimplify_and_add (init, seq_p);
1736 ggc_free (init);
1738 else
1739 /* We must still examine initializers for static variables
1740 as they may contain a label address. */
1741 walk_tree (&init, force_labels_r, NULL, NULL);
1745 return GS_ALL_DONE;
1748 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1749 and replacing the LOOP_EXPR with goto, but if the loop contains an
1750 EXIT_EXPR, we need to append a label for it to jump to. */
1752 static enum gimplify_status
1753 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1755 tree saved_label = gimplify_ctxp->exit_label;
1756 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1758 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1760 gimplify_ctxp->exit_label = NULL_TREE;
1762 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1764 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1766 if (gimplify_ctxp->exit_label)
1767 gimplify_seq_add_stmt (pre_p,
1768 gimple_build_label (gimplify_ctxp->exit_label));
1770 gimplify_ctxp->exit_label = saved_label;
1772 *expr_p = NULL;
1773 return GS_ALL_DONE;
1776 /* Gimplify a statement list onto a sequence. These may be created either
1777 by an enlightened front-end, or by shortcut_cond_expr. */
1779 static enum gimplify_status
1780 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1782 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1784 tree_stmt_iterator i = tsi_start (*expr_p);
1786 while (!tsi_end_p (i))
1788 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1789 tsi_delink (&i);
1792 if (temp)
1794 *expr_p = temp;
1795 return GS_OK;
1798 return GS_ALL_DONE;
1801 /* Callback for walk_gimple_seq. */
1803 static tree
1804 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1805 struct walk_stmt_info *wi)
1807 gimple *stmt = gsi_stmt (*gsi_p);
1809 *handled_ops_p = true;
1810 switch (gimple_code (stmt))
1812 case GIMPLE_TRY:
1813 /* A compiler-generated cleanup or a user-written try block.
1814 If it's empty, don't dive into it--that would result in
1815 worse location info. */
1816 if (gimple_try_eval (stmt) == NULL)
1818 wi->info = stmt;
1819 return integer_zero_node;
1821 /* Fall through. */
1822 case GIMPLE_BIND:
1823 case GIMPLE_CATCH:
1824 case GIMPLE_EH_FILTER:
1825 case GIMPLE_TRANSACTION:
1826 /* Walk the sub-statements. */
1827 *handled_ops_p = false;
1828 break;
1830 case GIMPLE_DEBUG:
1831 /* Ignore these. We may generate them before declarations that
1832 are never executed. If there's something to warn about,
1833 there will be non-debug stmts too, and we'll catch those. */
1834 break;
1836 case GIMPLE_CALL:
1837 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1839 *handled_ops_p = false;
1840 break;
1842 /* Fall through. */
1843 default:
1844 /* Save the first "real" statement (not a decl/lexical scope/...). */
1845 wi->info = stmt;
1846 return integer_zero_node;
1848 return NULL_TREE;
1851 /* Possibly warn about unreachable statements between switch's controlling
1852 expression and the first case. SEQ is the body of a switch expression. */
1854 static void
1855 maybe_warn_switch_unreachable (gimple_seq seq)
1857 if (!warn_switch_unreachable
1858 /* This warning doesn't play well with Fortran when optimizations
1859 are on. */
1860 || lang_GNU_Fortran ()
1861 || seq == NULL)
1862 return;
1864 struct walk_stmt_info wi;
1865 memset (&wi, 0, sizeof (wi));
1866 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1867 gimple *stmt = (gimple *) wi.info;
1869 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1871 if (gimple_code (stmt) == GIMPLE_GOTO
1872 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1873 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1874 /* Don't warn for compiler-generated gotos. These occur
1875 in Duff's devices, for example. */;
1876 else
1877 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1878 "statement will never be executed");
1883 /* A label entry that pairs label and a location. */
1884 struct label_entry
1886 tree label;
1887 location_t loc;
1890 /* Find LABEL in vector of label entries VEC. */
1892 static struct label_entry *
1893 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1895 unsigned int i;
1896 struct label_entry *l;
1898 FOR_EACH_VEC_ELT (*vec, i, l)
1899 if (l->label == label)
1900 return l;
1901 return NULL;
1904 /* Return true if LABEL, a LABEL_DECL, represents a case label
1905 in a vector of labels CASES. */
1907 static bool
1908 case_label_p (const vec<tree> *cases, tree label)
1910 unsigned int i;
1911 tree l;
1913 FOR_EACH_VEC_ELT (*cases, i, l)
1914 if (CASE_LABEL (l) == label)
1915 return true;
1916 return false;
1919 /* Find the last nondebug statement in a scope STMT. */
1921 static gimple *
1922 last_stmt_in_scope (gimple *stmt)
1924 if (!stmt)
1925 return NULL;
1927 switch (gimple_code (stmt))
1929 case GIMPLE_BIND:
1931 gbind *bind = as_a <gbind *> (stmt);
1932 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
1933 return last_stmt_in_scope (stmt);
1936 case GIMPLE_TRY:
1938 gtry *try_stmt = as_a <gtry *> (stmt);
1939 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
1940 gimple *last_eval = last_stmt_in_scope (stmt);
1941 if (gimple_stmt_may_fallthru (last_eval)
1942 && (last_eval == NULL
1943 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
1944 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1946 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
1947 return last_stmt_in_scope (stmt);
1949 else
1950 return last_eval;
1953 case GIMPLE_DEBUG:
1954 gcc_unreachable ();
1956 default:
1957 return stmt;
1961 /* Collect interesting labels in LABELS and return the statement preceding
1962 another case label, or a user-defined label. Store a location useful
1963 to give warnings at *PREVLOC (usually the location of the returned
1964 statement or of its surrounding scope). */
1966 static gimple *
1967 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1968 auto_vec <struct label_entry> *labels,
1969 location_t *prevloc)
1971 gimple *prev = NULL;
1973 *prevloc = UNKNOWN_LOCATION;
1976 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
1978 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
1979 which starts on a GIMPLE_SWITCH and ends with a break label.
1980 Handle that as a single statement that can fall through. */
1981 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
1982 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
1983 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
1984 if (last
1985 && gimple_code (first) == GIMPLE_SWITCH
1986 && gimple_code (last) == GIMPLE_LABEL)
1988 tree label = gimple_label_label (as_a <glabel *> (last));
1989 if (SWITCH_BREAK_LABEL_P (label))
1991 prev = bind;
1992 gsi_next (gsi_p);
1993 continue;
1997 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1998 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2000 /* Nested scope. Only look at the last statement of
2001 the innermost scope. */
2002 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2003 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2004 if (last)
2006 prev = last;
2007 /* It might be a label without a location. Use the
2008 location of the scope then. */
2009 if (!gimple_has_location (prev))
2010 *prevloc = bind_loc;
2012 gsi_next (gsi_p);
2013 continue;
2016 /* Ifs are tricky. */
2017 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2019 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2020 tree false_lab = gimple_cond_false_label (cond_stmt);
2021 location_t if_loc = gimple_location (cond_stmt);
2023 /* If we have e.g.
2024 if (i > 1) goto <D.2259>; else goto D;
2025 we can't do much with the else-branch. */
2026 if (!DECL_ARTIFICIAL (false_lab))
2027 break;
2029 /* Go on until the false label, then one step back. */
2030 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2032 gimple *stmt = gsi_stmt (*gsi_p);
2033 if (gimple_code (stmt) == GIMPLE_LABEL
2034 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2035 break;
2038 /* Not found? Oops. */
2039 if (gsi_end_p (*gsi_p))
2040 break;
2042 struct label_entry l = { false_lab, if_loc };
2043 labels->safe_push (l);
2045 /* Go to the last statement of the then branch. */
2046 gsi_prev (gsi_p);
2048 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2049 <D.1759>:
2050 <stmt>;
2051 goto <D.1761>;
2052 <D.1760>:
2054 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2055 && !gimple_has_location (gsi_stmt (*gsi_p)))
2057 /* Look at the statement before, it might be
2058 attribute fallthrough, in which case don't warn. */
2059 gsi_prev (gsi_p);
2060 bool fallthru_before_dest
2061 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2062 gsi_next (gsi_p);
2063 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2064 if (!fallthru_before_dest)
2066 struct label_entry l = { goto_dest, if_loc };
2067 labels->safe_push (l);
2070 /* And move back. */
2071 gsi_next (gsi_p);
2074 /* Remember the last statement. Skip labels that are of no interest
2075 to us. */
2076 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2078 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2079 if (find_label_entry (labels, label))
2080 prev = gsi_stmt (*gsi_p);
2082 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2084 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2085 prev = gsi_stmt (*gsi_p);
2086 gsi_next (gsi_p);
2088 while (!gsi_end_p (*gsi_p)
2089 /* Stop if we find a case or a user-defined label. */
2090 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2091 || !gimple_has_location (gsi_stmt (*gsi_p))));
2093 if (prev && gimple_has_location (prev))
2094 *prevloc = gimple_location (prev);
2095 return prev;
2098 /* Return true if the switch fallthough warning should occur. LABEL is
2099 the label statement that we're falling through to. */
2101 static bool
2102 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2104 gimple_stmt_iterator gsi = *gsi_p;
2106 /* Don't warn if the label is marked with a "falls through" comment. */
2107 if (FALLTHROUGH_LABEL_P (label))
2108 return false;
2110 /* Don't warn for non-case labels followed by a statement:
2111 case 0:
2112 foo ();
2113 label:
2114 bar ();
2115 as these are likely intentional. */
2116 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2118 tree l;
2119 while (!gsi_end_p (gsi)
2120 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2121 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2122 && !case_label_p (&gimplify_ctxp->case_labels, l))
2123 gsi_next_nondebug (&gsi);
2124 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2125 return false;
2128 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2129 immediately breaks. */
2130 gsi = *gsi_p;
2132 /* Skip all immediately following labels. */
2133 while (!gsi_end_p (gsi)
2134 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2135 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2136 gsi_next_nondebug (&gsi);
2138 /* { ... something; default:; } */
2139 if (gsi_end_p (gsi)
2140 /* { ... something; default: break; } or
2141 { ... something; default: goto L; } */
2142 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2143 /* { ... something; default: return; } */
2144 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2145 return false;
2147 return true;
2150 /* Callback for walk_gimple_seq. */
2152 static tree
2153 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2154 struct walk_stmt_info *)
2156 gimple *stmt = gsi_stmt (*gsi_p);
2158 *handled_ops_p = true;
2159 switch (gimple_code (stmt))
2161 case GIMPLE_TRY:
2162 case GIMPLE_BIND:
2163 case GIMPLE_CATCH:
2164 case GIMPLE_EH_FILTER:
2165 case GIMPLE_TRANSACTION:
2166 /* Walk the sub-statements. */
2167 *handled_ops_p = false;
2168 break;
2170 /* Find a sequence of form:
2172 GIMPLE_LABEL
2173 [...]
2174 <may fallthru stmt>
2175 GIMPLE_LABEL
2177 and possibly warn. */
2178 case GIMPLE_LABEL:
2180 /* Found a label. Skip all immediately following labels. */
2181 while (!gsi_end_p (*gsi_p)
2182 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2183 gsi_next_nondebug (gsi_p);
2185 /* There might be no more statements. */
2186 if (gsi_end_p (*gsi_p))
2187 return integer_zero_node;
2189 /* Vector of labels that fall through. */
2190 auto_vec <struct label_entry> labels;
2191 location_t prevloc;
2192 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2194 /* There might be no more statements. */
2195 if (gsi_end_p (*gsi_p))
2196 return integer_zero_node;
2198 gimple *next = gsi_stmt (*gsi_p);
2199 tree label;
2200 /* If what follows is a label, then we may have a fallthrough. */
2201 if (gimple_code (next) == GIMPLE_LABEL
2202 && gimple_has_location (next)
2203 && (label = gimple_label_label (as_a <glabel *> (next)))
2204 && prev != NULL)
2206 struct label_entry *l;
2207 bool warned_p = false;
2208 auto_diagnostic_group d;
2209 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2210 /* Quiet. */;
2211 else if (gimple_code (prev) == GIMPLE_LABEL
2212 && (label = gimple_label_label (as_a <glabel *> (prev)))
2213 && (l = find_label_entry (&labels, label)))
2214 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2215 "this statement may fall through");
2216 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2217 /* Try to be clever and don't warn when the statement
2218 can't actually fall through. */
2219 && gimple_stmt_may_fallthru (prev)
2220 && prevloc != UNKNOWN_LOCATION)
2221 warned_p = warning_at (prevloc,
2222 OPT_Wimplicit_fallthrough_,
2223 "this statement may fall through");
2224 if (warned_p)
2225 inform (gimple_location (next), "here");
2227 /* Mark this label as processed so as to prevent multiple
2228 warnings in nested switches. */
2229 FALLTHROUGH_LABEL_P (label) = true;
2231 /* So that next warn_implicit_fallthrough_r will start looking for
2232 a new sequence starting with this label. */
2233 gsi_prev (gsi_p);
2236 break;
2237 default:
2238 break;
2240 return NULL_TREE;
2243 /* Warn when a switch case falls through. */
2245 static void
2246 maybe_warn_implicit_fallthrough (gimple_seq seq)
2248 if (!warn_implicit_fallthrough)
2249 return;
2251 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2252 if (!(lang_GNU_C ()
2253 || lang_GNU_CXX ()
2254 || lang_GNU_OBJC ()))
2255 return;
2257 struct walk_stmt_info wi;
2258 memset (&wi, 0, sizeof (wi));
2259 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2262 /* Callback for walk_gimple_seq. */
2264 static tree
2265 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2266 struct walk_stmt_info *)
2268 gimple *stmt = gsi_stmt (*gsi_p);
2270 *handled_ops_p = true;
2271 switch (gimple_code (stmt))
2273 case GIMPLE_TRY:
2274 case GIMPLE_BIND:
2275 case GIMPLE_CATCH:
2276 case GIMPLE_EH_FILTER:
2277 case GIMPLE_TRANSACTION:
2278 /* Walk the sub-statements. */
2279 *handled_ops_p = false;
2280 break;
2281 case GIMPLE_CALL:
2282 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2284 gsi_remove (gsi_p, true);
2285 if (gsi_end_p (*gsi_p))
2286 return integer_zero_node;
2288 bool found = false;
2289 location_t loc = gimple_location (stmt);
2291 gimple_stmt_iterator gsi2 = *gsi_p;
2292 stmt = gsi_stmt (gsi2);
2293 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2295 /* Go on until the artificial label. */
2296 tree goto_dest = gimple_goto_dest (stmt);
2297 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2299 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2300 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2301 == goto_dest)
2302 break;
2305 /* Not found? Stop. */
2306 if (gsi_end_p (gsi2))
2307 break;
2309 /* Look one past it. */
2310 gsi_next (&gsi2);
2313 /* We're looking for a case label or default label here. */
2314 while (!gsi_end_p (gsi2))
2316 stmt = gsi_stmt (gsi2);
2317 if (gimple_code (stmt) == GIMPLE_LABEL)
2319 tree label = gimple_label_label (as_a <glabel *> (stmt));
2320 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2322 found = true;
2323 break;
2326 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2328 else if (!is_gimple_debug (stmt))
2329 /* Anything else is not expected. */
2330 break;
2331 gsi_next (&gsi2);
2333 if (!found)
2334 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2335 "a case label or default label");
2337 break;
2338 default:
2339 break;
2341 return NULL_TREE;
2344 /* Expand all FALLTHROUGH () calls in SEQ. */
2346 static void
2347 expand_FALLTHROUGH (gimple_seq *seq_p)
2349 struct walk_stmt_info wi;
2350 memset (&wi, 0, sizeof (wi));
2351 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2355 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2356 branch to. */
2358 static enum gimplify_status
2359 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2361 tree switch_expr = *expr_p;
2362 gimple_seq switch_body_seq = NULL;
2363 enum gimplify_status ret;
2364 tree index_type = TREE_TYPE (switch_expr);
2365 if (index_type == NULL_TREE)
2366 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2368 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2369 fb_rvalue);
2370 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2371 return ret;
2373 if (SWITCH_BODY (switch_expr))
2375 vec<tree> labels;
2376 vec<tree> saved_labels;
2377 hash_set<tree> *saved_live_switch_vars = NULL;
2378 tree default_case = NULL_TREE;
2379 gswitch *switch_stmt;
2381 /* Save old labels, get new ones from body, then restore the old
2382 labels. Save all the things from the switch body to append after. */
2383 saved_labels = gimplify_ctxp->case_labels;
2384 gimplify_ctxp->case_labels.create (8);
2386 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2387 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2388 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2389 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2390 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2391 else
2392 gimplify_ctxp->live_switch_vars = NULL;
2394 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2395 gimplify_ctxp->in_switch_expr = true;
2397 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2399 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2400 maybe_warn_switch_unreachable (switch_body_seq);
2401 maybe_warn_implicit_fallthrough (switch_body_seq);
2402 /* Only do this for the outermost GIMPLE_SWITCH. */
2403 if (!gimplify_ctxp->in_switch_expr)
2404 expand_FALLTHROUGH (&switch_body_seq);
2406 labels = gimplify_ctxp->case_labels;
2407 gimplify_ctxp->case_labels = saved_labels;
2409 if (gimplify_ctxp->live_switch_vars)
2411 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2412 delete gimplify_ctxp->live_switch_vars;
2414 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2416 preprocess_case_label_vec_for_gimple (labels, index_type,
2417 &default_case);
2419 bool add_bind = false;
2420 if (!default_case)
2422 glabel *new_default;
2424 default_case
2425 = build_case_label (NULL_TREE, NULL_TREE,
2426 create_artificial_label (UNKNOWN_LOCATION));
2427 if (old_in_switch_expr)
2429 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2430 add_bind = true;
2432 new_default = gimple_build_label (CASE_LABEL (default_case));
2433 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2435 else if (old_in_switch_expr)
2437 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2438 if (last && gimple_code (last) == GIMPLE_LABEL)
2440 tree label = gimple_label_label (as_a <glabel *> (last));
2441 if (SWITCH_BREAK_LABEL_P (label))
2442 add_bind = true;
2446 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2447 default_case, labels);
2448 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2449 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2450 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2451 so that we can easily find the start and end of the switch
2452 statement. */
2453 if (add_bind)
2455 gimple_seq bind_body = NULL;
2456 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2457 gimple_seq_add_seq (&bind_body, switch_body_seq);
2458 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2459 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2460 gimplify_seq_add_stmt (pre_p, bind);
2462 else
2464 gimplify_seq_add_stmt (pre_p, switch_stmt);
2465 gimplify_seq_add_seq (pre_p, switch_body_seq);
2467 labels.release ();
2469 else
2470 gcc_unreachable ();
2472 return GS_ALL_DONE;
2475 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2477 static enum gimplify_status
2478 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2480 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2481 == current_function_decl);
2483 tree label = LABEL_EXPR_LABEL (*expr_p);
2484 glabel *label_stmt = gimple_build_label (label);
2485 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2486 gimplify_seq_add_stmt (pre_p, label_stmt);
2488 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2489 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2490 NOT_TAKEN));
2491 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2492 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2493 TAKEN));
2495 return GS_ALL_DONE;
2498 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2500 static enum gimplify_status
2501 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2503 struct gimplify_ctx *ctxp;
2504 glabel *label_stmt;
2506 /* Invalid programs can play Duff's Device type games with, for example,
2507 #pragma omp parallel. At least in the C front end, we don't
2508 detect such invalid branches until after gimplification, in the
2509 diagnose_omp_blocks pass. */
2510 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2511 if (ctxp->case_labels.exists ())
2512 break;
2514 tree label = CASE_LABEL (*expr_p);
2515 label_stmt = gimple_build_label (label);
2516 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2517 ctxp->case_labels.safe_push (*expr_p);
2518 gimplify_seq_add_stmt (pre_p, label_stmt);
2520 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2521 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2522 NOT_TAKEN));
2523 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2524 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2525 TAKEN));
2527 return GS_ALL_DONE;
2530 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2531 if necessary. */
2533 tree
2534 build_and_jump (tree *label_p)
2536 if (label_p == NULL)
2537 /* If there's nowhere to jump, just fall through. */
2538 return NULL_TREE;
2540 if (*label_p == NULL_TREE)
2542 tree label = create_artificial_label (UNKNOWN_LOCATION);
2543 *label_p = label;
2546 return build1 (GOTO_EXPR, void_type_node, *label_p);
2549 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2550 This also involves building a label to jump to and communicating it to
2551 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2553 static enum gimplify_status
2554 gimplify_exit_expr (tree *expr_p)
2556 tree cond = TREE_OPERAND (*expr_p, 0);
2557 tree expr;
2559 expr = build_and_jump (&gimplify_ctxp->exit_label);
2560 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2561 *expr_p = expr;
2563 return GS_OK;
2566 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2567 different from its canonical type, wrap the whole thing inside a
2568 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2569 type.
2571 The canonical type of a COMPONENT_REF is the type of the field being
2572 referenced--unless the field is a bit-field which can be read directly
2573 in a smaller mode, in which case the canonical type is the
2574 sign-appropriate type corresponding to that mode. */
2576 static void
2577 canonicalize_component_ref (tree *expr_p)
2579 tree expr = *expr_p;
2580 tree type;
2582 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2584 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2585 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2586 else
2587 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2589 /* One could argue that all the stuff below is not necessary for
2590 the non-bitfield case and declare it a FE error if type
2591 adjustment would be needed. */
2592 if (TREE_TYPE (expr) != type)
2594 #ifdef ENABLE_TYPES_CHECKING
2595 tree old_type = TREE_TYPE (expr);
2596 #endif
2597 int type_quals;
2599 /* We need to preserve qualifiers and propagate them from
2600 operand 0. */
2601 type_quals = TYPE_QUALS (type)
2602 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2603 if (TYPE_QUALS (type) != type_quals)
2604 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2606 /* Set the type of the COMPONENT_REF to the underlying type. */
2607 TREE_TYPE (expr) = type;
2609 #ifdef ENABLE_TYPES_CHECKING
2610 /* It is now a FE error, if the conversion from the canonical
2611 type to the original expression type is not useless. */
2612 gcc_assert (useless_type_conversion_p (old_type, type));
2613 #endif
2617 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2618 to foo, embed that change in the ADDR_EXPR by converting
2619 T array[U];
2620 (T *)&array
2622 &array[L]
2623 where L is the lower bound. For simplicity, only do this for constant
2624 lower bound.
2625 The constraint is that the type of &array[L] is trivially convertible
2626 to T *. */
2628 static void
2629 canonicalize_addr_expr (tree *expr_p)
2631 tree expr = *expr_p;
2632 tree addr_expr = TREE_OPERAND (expr, 0);
2633 tree datype, ddatype, pddatype;
2635 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2636 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2637 || TREE_CODE (addr_expr) != ADDR_EXPR)
2638 return;
2640 /* The addr_expr type should be a pointer to an array. */
2641 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2642 if (TREE_CODE (datype) != ARRAY_TYPE)
2643 return;
2645 /* The pointer to element type shall be trivially convertible to
2646 the expression pointer type. */
2647 ddatype = TREE_TYPE (datype);
2648 pddatype = build_pointer_type (ddatype);
2649 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2650 pddatype))
2651 return;
2653 /* The lower bound and element sizes must be constant. */
2654 if (!TYPE_SIZE_UNIT (ddatype)
2655 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2656 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2657 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2658 return;
2660 /* All checks succeeded. Build a new node to merge the cast. */
2661 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2662 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2663 NULL_TREE, NULL_TREE);
2664 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2666 /* We can have stripped a required restrict qualifier above. */
2667 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2668 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2671 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2672 underneath as appropriate. */
2674 static enum gimplify_status
2675 gimplify_conversion (tree *expr_p)
2677 location_t loc = EXPR_LOCATION (*expr_p);
2678 gcc_assert (CONVERT_EXPR_P (*expr_p));
2680 /* Then strip away all but the outermost conversion. */
2681 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2683 /* And remove the outermost conversion if it's useless. */
2684 if (tree_ssa_useless_type_conversion (*expr_p))
2685 *expr_p = TREE_OPERAND (*expr_p, 0);
2687 /* If we still have a conversion at the toplevel,
2688 then canonicalize some constructs. */
2689 if (CONVERT_EXPR_P (*expr_p))
2691 tree sub = TREE_OPERAND (*expr_p, 0);
2693 /* If a NOP conversion is changing the type of a COMPONENT_REF
2694 expression, then canonicalize its type now in order to expose more
2695 redundant conversions. */
2696 if (TREE_CODE (sub) == COMPONENT_REF)
2697 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2699 /* If a NOP conversion is changing a pointer to array of foo
2700 to a pointer to foo, embed that change in the ADDR_EXPR. */
2701 else if (TREE_CODE (sub) == ADDR_EXPR)
2702 canonicalize_addr_expr (expr_p);
2705 /* If we have a conversion to a non-register type force the
2706 use of a VIEW_CONVERT_EXPR instead. */
2707 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2708 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2709 TREE_OPERAND (*expr_p, 0));
2711 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2712 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2713 TREE_SET_CODE (*expr_p, NOP_EXPR);
2715 return GS_OK;
2718 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2719 DECL_VALUE_EXPR, and it's worth re-examining things. */
2721 static enum gimplify_status
2722 gimplify_var_or_parm_decl (tree *expr_p)
2724 tree decl = *expr_p;
2726 /* ??? If this is a local variable, and it has not been seen in any
2727 outer BIND_EXPR, then it's probably the result of a duplicate
2728 declaration, for which we've already issued an error. It would
2729 be really nice if the front end wouldn't leak these at all.
2730 Currently the only known culprit is C++ destructors, as seen
2731 in g++.old-deja/g++.jason/binding.C. */
2732 if (VAR_P (decl)
2733 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2734 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2735 && decl_function_context (decl) == current_function_decl)
2737 gcc_assert (seen_error ());
2738 return GS_ERROR;
2741 /* When within an OMP context, notice uses of variables. */
2742 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2743 return GS_ALL_DONE;
2745 /* If the decl is an alias for another expression, substitute it now. */
2746 if (DECL_HAS_VALUE_EXPR_P (decl))
2748 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2749 return GS_OK;
2752 return GS_ALL_DONE;
2755 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2757 static void
2758 recalculate_side_effects (tree t)
2760 enum tree_code code = TREE_CODE (t);
2761 int len = TREE_OPERAND_LENGTH (t);
2762 int i;
2764 switch (TREE_CODE_CLASS (code))
2766 case tcc_expression:
2767 switch (code)
2769 case INIT_EXPR:
2770 case MODIFY_EXPR:
2771 case VA_ARG_EXPR:
2772 case PREDECREMENT_EXPR:
2773 case PREINCREMENT_EXPR:
2774 case POSTDECREMENT_EXPR:
2775 case POSTINCREMENT_EXPR:
2776 /* All of these have side-effects, no matter what their
2777 operands are. */
2778 return;
2780 default:
2781 break;
2783 /* Fall through. */
2785 case tcc_comparison: /* a comparison expression */
2786 case tcc_unary: /* a unary arithmetic expression */
2787 case tcc_binary: /* a binary arithmetic expression */
2788 case tcc_reference: /* a reference */
2789 case tcc_vl_exp: /* a function call */
2790 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2791 for (i = 0; i < len; ++i)
2793 tree op = TREE_OPERAND (t, i);
2794 if (op && TREE_SIDE_EFFECTS (op))
2795 TREE_SIDE_EFFECTS (t) = 1;
2797 break;
2799 case tcc_constant:
2800 /* No side-effects. */
2801 return;
2803 default:
2804 gcc_unreachable ();
2808 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2809 node *EXPR_P.
2811 compound_lval
2812 : min_lval '[' val ']'
2813 | min_lval '.' ID
2814 | compound_lval '[' val ']'
2815 | compound_lval '.' ID
2817 This is not part of the original SIMPLE definition, which separates
2818 array and member references, but it seems reasonable to handle them
2819 together. Also, this way we don't run into problems with union
2820 aliasing; gcc requires that for accesses through a union to alias, the
2821 union reference must be explicit, which was not always the case when we
2822 were splitting up array and member refs.
2824 PRE_P points to the sequence where side effects that must happen before
2825 *EXPR_P should be stored.
2827 POST_P points to the sequence where side effects that must happen after
2828 *EXPR_P should be stored. */
2830 static enum gimplify_status
2831 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2832 fallback_t fallback)
2834 tree *p;
2835 enum gimplify_status ret = GS_ALL_DONE, tret;
2836 int i;
2837 location_t loc = EXPR_LOCATION (*expr_p);
2838 tree expr = *expr_p;
2840 /* Create a stack of the subexpressions so later we can walk them in
2841 order from inner to outer. */
2842 auto_vec<tree, 10> expr_stack;
2844 /* We can handle anything that get_inner_reference can deal with. */
2845 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2847 restart:
2848 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2849 if (TREE_CODE (*p) == INDIRECT_REF)
2850 *p = fold_indirect_ref_loc (loc, *p);
2852 if (handled_component_p (*p))
2854 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2855 additional COMPONENT_REFs. */
2856 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2857 && gimplify_var_or_parm_decl (p) == GS_OK)
2858 goto restart;
2859 else
2860 break;
2862 expr_stack.safe_push (*p);
2865 gcc_assert (expr_stack.length ());
2867 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2868 walked through and P points to the innermost expression.
2870 Java requires that we elaborated nodes in source order. That
2871 means we must gimplify the inner expression followed by each of
2872 the indices, in order. But we can't gimplify the inner
2873 expression until we deal with any variable bounds, sizes, or
2874 positions in order to deal with PLACEHOLDER_EXPRs.
2876 So we do this in three steps. First we deal with the annotations
2877 for any variables in the components, then we gimplify the base,
2878 then we gimplify any indices, from left to right. */
2879 for (i = expr_stack.length () - 1; i >= 0; i--)
2881 tree t = expr_stack[i];
2883 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2885 /* Gimplify the low bound and element type size and put them into
2886 the ARRAY_REF. If these values are set, they have already been
2887 gimplified. */
2888 if (TREE_OPERAND (t, 2) == NULL_TREE)
2890 tree low = unshare_expr (array_ref_low_bound (t));
2891 if (!is_gimple_min_invariant (low))
2893 TREE_OPERAND (t, 2) = low;
2894 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2895 post_p, is_gimple_reg,
2896 fb_rvalue);
2897 ret = MIN (ret, tret);
2900 else
2902 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2903 is_gimple_reg, fb_rvalue);
2904 ret = MIN (ret, tret);
2907 if (TREE_OPERAND (t, 3) == NULL_TREE)
2909 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2910 tree elmt_size = unshare_expr (array_ref_element_size (t));
2911 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2913 /* Divide the element size by the alignment of the element
2914 type (above). */
2915 elmt_size
2916 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2918 if (!is_gimple_min_invariant (elmt_size))
2920 TREE_OPERAND (t, 3) = elmt_size;
2921 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2922 post_p, is_gimple_reg,
2923 fb_rvalue);
2924 ret = MIN (ret, tret);
2927 else
2929 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2930 is_gimple_reg, fb_rvalue);
2931 ret = MIN (ret, tret);
2934 else if (TREE_CODE (t) == COMPONENT_REF)
2936 /* Set the field offset into T and gimplify it. */
2937 if (TREE_OPERAND (t, 2) == NULL_TREE)
2939 tree offset = unshare_expr (component_ref_field_offset (t));
2940 tree field = TREE_OPERAND (t, 1);
2941 tree factor
2942 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2944 /* Divide the offset by its alignment. */
2945 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2947 if (!is_gimple_min_invariant (offset))
2949 TREE_OPERAND (t, 2) = offset;
2950 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2951 post_p, is_gimple_reg,
2952 fb_rvalue);
2953 ret = MIN (ret, tret);
2956 else
2958 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2959 is_gimple_reg, fb_rvalue);
2960 ret = MIN (ret, tret);
2965 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2966 so as to match the min_lval predicate. Failure to do so may result
2967 in the creation of large aggregate temporaries. */
2968 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2969 fallback | fb_lvalue);
2970 ret = MIN (ret, tret);
2972 /* And finally, the indices and operands of ARRAY_REF. During this
2973 loop we also remove any useless conversions. */
2974 for (; expr_stack.length () > 0; )
2976 tree t = expr_stack.pop ();
2978 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2980 /* Gimplify the dimension. */
2981 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2983 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2984 is_gimple_val, fb_rvalue);
2985 ret = MIN (ret, tret);
2989 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2991 /* The innermost expression P may have originally had
2992 TREE_SIDE_EFFECTS set which would have caused all the outer
2993 expressions in *EXPR_P leading to P to also have had
2994 TREE_SIDE_EFFECTS set. */
2995 recalculate_side_effects (t);
2998 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2999 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3001 canonicalize_component_ref (expr_p);
3004 expr_stack.release ();
3006 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3008 return ret;
3011 /* Gimplify the self modifying expression pointed to by EXPR_P
3012 (++, --, +=, -=).
3014 PRE_P points to the list where side effects that must happen before
3015 *EXPR_P should be stored.
3017 POST_P points to the list where side effects that must happen after
3018 *EXPR_P should be stored.
3020 WANT_VALUE is nonzero iff we want to use the value of this expression
3021 in another expression.
3023 ARITH_TYPE is the type the computation should be performed in. */
3025 enum gimplify_status
3026 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3027 bool want_value, tree arith_type)
3029 enum tree_code code;
3030 tree lhs, lvalue, rhs, t1;
3031 gimple_seq post = NULL, *orig_post_p = post_p;
3032 bool postfix;
3033 enum tree_code arith_code;
3034 enum gimplify_status ret;
3035 location_t loc = EXPR_LOCATION (*expr_p);
3037 code = TREE_CODE (*expr_p);
3039 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3040 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3042 /* Prefix or postfix? */
3043 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3044 /* Faster to treat as prefix if result is not used. */
3045 postfix = want_value;
3046 else
3047 postfix = false;
3049 /* For postfix, make sure the inner expression's post side effects
3050 are executed after side effects from this expression. */
3051 if (postfix)
3052 post_p = &post;
3054 /* Add or subtract? */
3055 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3056 arith_code = PLUS_EXPR;
3057 else
3058 arith_code = MINUS_EXPR;
3060 /* Gimplify the LHS into a GIMPLE lvalue. */
3061 lvalue = TREE_OPERAND (*expr_p, 0);
3062 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3063 if (ret == GS_ERROR)
3064 return ret;
3066 /* Extract the operands to the arithmetic operation. */
3067 lhs = lvalue;
3068 rhs = TREE_OPERAND (*expr_p, 1);
3070 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3071 that as the result value and in the postqueue operation. */
3072 if (postfix)
3074 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3075 if (ret == GS_ERROR)
3076 return ret;
3078 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3081 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3082 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3084 rhs = convert_to_ptrofftype_loc (loc, rhs);
3085 if (arith_code == MINUS_EXPR)
3086 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3087 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3089 else
3090 t1 = fold_convert (TREE_TYPE (*expr_p),
3091 fold_build2 (arith_code, arith_type,
3092 fold_convert (arith_type, lhs),
3093 fold_convert (arith_type, rhs)));
3095 if (postfix)
3097 gimplify_assign (lvalue, t1, pre_p);
3098 gimplify_seq_add_seq (orig_post_p, post);
3099 *expr_p = lhs;
3100 return GS_ALL_DONE;
3102 else
3104 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3105 return GS_OK;
3109 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3111 static void
3112 maybe_with_size_expr (tree *expr_p)
3114 tree expr = *expr_p;
3115 tree type = TREE_TYPE (expr);
3116 tree size;
3118 /* If we've already wrapped this or the type is error_mark_node, we can't do
3119 anything. */
3120 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3121 || type == error_mark_node)
3122 return;
3124 /* If the size isn't known or is a constant, we have nothing to do. */
3125 size = TYPE_SIZE_UNIT (type);
3126 if (!size || poly_int_tree_p (size))
3127 return;
3129 /* Otherwise, make a WITH_SIZE_EXPR. */
3130 size = unshare_expr (size);
3131 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3132 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3135 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3136 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3137 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3138 gimplified to an SSA name. */
3140 enum gimplify_status
3141 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3142 bool allow_ssa)
3144 bool (*test) (tree);
3145 fallback_t fb;
3147 /* In general, we allow lvalues for function arguments to avoid
3148 extra overhead of copying large aggregates out of even larger
3149 aggregates into temporaries only to copy the temporaries to
3150 the argument list. Make optimizers happy by pulling out to
3151 temporaries those types that fit in registers. */
3152 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3153 test = is_gimple_val, fb = fb_rvalue;
3154 else
3156 test = is_gimple_lvalue, fb = fb_either;
3157 /* Also strip a TARGET_EXPR that would force an extra copy. */
3158 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3160 tree init = TARGET_EXPR_INITIAL (*arg_p);
3161 if (init
3162 && !VOID_TYPE_P (TREE_TYPE (init)))
3163 *arg_p = init;
3167 /* If this is a variable sized type, we must remember the size. */
3168 maybe_with_size_expr (arg_p);
3170 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3171 /* Make sure arguments have the same location as the function call
3172 itself. */
3173 protected_set_expr_location (*arg_p, call_location);
3175 /* There is a sequence point before a function call. Side effects in
3176 the argument list must occur before the actual call. So, when
3177 gimplifying arguments, force gimplify_expr to use an internal
3178 post queue which is then appended to the end of PRE_P. */
3179 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3182 /* Don't fold inside offloading or taskreg regions: it can break code by
3183 adding decl references that weren't in the source. We'll do it during
3184 omplower pass instead. */
3186 static bool
3187 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3189 struct gimplify_omp_ctx *ctx;
3190 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3191 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3192 return false;
3193 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3194 return false;
3195 return fold_stmt (gsi);
3198 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3199 WANT_VALUE is true if the result of the call is desired. */
3201 static enum gimplify_status
3202 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3204 tree fndecl, parms, p, fnptrtype;
3205 enum gimplify_status ret;
3206 int i, nargs;
3207 gcall *call;
3208 bool builtin_va_start_p = false;
3209 location_t loc = EXPR_LOCATION (*expr_p);
3211 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3213 /* For reliable diagnostics during inlining, it is necessary that
3214 every call_expr be annotated with file and line. */
3215 if (! EXPR_HAS_LOCATION (*expr_p))
3216 SET_EXPR_LOCATION (*expr_p, input_location);
3218 /* Gimplify internal functions created in the FEs. */
3219 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3221 if (want_value)
3222 return GS_ALL_DONE;
3224 nargs = call_expr_nargs (*expr_p);
3225 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3226 auto_vec<tree> vargs (nargs);
3228 for (i = 0; i < nargs; i++)
3230 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3231 EXPR_LOCATION (*expr_p));
3232 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3235 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3236 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3237 gimplify_seq_add_stmt (pre_p, call);
3238 return GS_ALL_DONE;
3241 /* This may be a call to a builtin function.
3243 Builtin function calls may be transformed into different
3244 (and more efficient) builtin function calls under certain
3245 circumstances. Unfortunately, gimplification can muck things
3246 up enough that the builtin expanders are not aware that certain
3247 transformations are still valid.
3249 So we attempt transformation/gimplification of the call before
3250 we gimplify the CALL_EXPR. At this time we do not manage to
3251 transform all calls in the same manner as the expanders do, but
3252 we do transform most of them. */
3253 fndecl = get_callee_fndecl (*expr_p);
3254 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3255 switch (DECL_FUNCTION_CODE (fndecl))
3257 CASE_BUILT_IN_ALLOCA:
3258 /* If the call has been built for a variable-sized object, then we
3259 want to restore the stack level when the enclosing BIND_EXPR is
3260 exited to reclaim the allocated space; otherwise, we precisely
3261 need to do the opposite and preserve the latest stack level. */
3262 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3263 gimplify_ctxp->save_stack = true;
3264 else
3265 gimplify_ctxp->keep_stack = true;
3266 break;
3268 case BUILT_IN_VA_START:
3270 builtin_va_start_p = TRUE;
3271 if (call_expr_nargs (*expr_p) < 2)
3273 error ("too few arguments to function %<va_start%>");
3274 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3275 return GS_OK;
3278 if (fold_builtin_next_arg (*expr_p, true))
3280 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3281 return GS_OK;
3283 break;
3286 default:
3289 if (fndecl && fndecl_built_in_p (fndecl))
3291 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3292 if (new_tree && new_tree != *expr_p)
3294 /* There was a transformation of this call which computes the
3295 same value, but in a more efficient way. Return and try
3296 again. */
3297 *expr_p = new_tree;
3298 return GS_OK;
3302 /* Remember the original function pointer type. */
3303 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3305 /* There is a sequence point before the call, so any side effects in
3306 the calling expression must occur before the actual call. Force
3307 gimplify_expr to use an internal post queue. */
3308 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3309 is_gimple_call_addr, fb_rvalue);
3311 nargs = call_expr_nargs (*expr_p);
3313 /* Get argument types for verification. */
3314 fndecl = get_callee_fndecl (*expr_p);
3315 parms = NULL_TREE;
3316 if (fndecl)
3317 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3318 else
3319 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3321 if (fndecl && DECL_ARGUMENTS (fndecl))
3322 p = DECL_ARGUMENTS (fndecl);
3323 else if (parms)
3324 p = parms;
3325 else
3326 p = NULL_TREE;
3327 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3330 /* If the last argument is __builtin_va_arg_pack () and it is not
3331 passed as a named argument, decrease the number of CALL_EXPR
3332 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3333 if (!p
3334 && i < nargs
3335 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3337 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3338 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3340 if (last_arg_fndecl
3341 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3343 tree call = *expr_p;
3345 --nargs;
3346 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3347 CALL_EXPR_FN (call),
3348 nargs, CALL_EXPR_ARGP (call));
3350 /* Copy all CALL_EXPR flags, location and block, except
3351 CALL_EXPR_VA_ARG_PACK flag. */
3352 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3353 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3354 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3355 = CALL_EXPR_RETURN_SLOT_OPT (call);
3356 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3357 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3359 /* Set CALL_EXPR_VA_ARG_PACK. */
3360 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3364 /* If the call returns twice then after building the CFG the call
3365 argument computations will no longer dominate the call because
3366 we add an abnormal incoming edge to the call. So do not use SSA
3367 vars there. */
3368 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3370 /* Gimplify the function arguments. */
3371 if (nargs > 0)
3373 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3374 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3375 PUSH_ARGS_REVERSED ? i-- : i++)
3377 enum gimplify_status t;
3379 /* Avoid gimplifying the second argument to va_start, which needs to
3380 be the plain PARM_DECL. */
3381 if ((i != 1) || !builtin_va_start_p)
3383 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3384 EXPR_LOCATION (*expr_p), ! returns_twice);
3386 if (t == GS_ERROR)
3387 ret = GS_ERROR;
3392 /* Gimplify the static chain. */
3393 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3395 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3396 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3397 else
3399 enum gimplify_status t;
3400 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3401 EXPR_LOCATION (*expr_p), ! returns_twice);
3402 if (t == GS_ERROR)
3403 ret = GS_ERROR;
3407 /* Verify the function result. */
3408 if (want_value && fndecl
3409 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3411 error_at (loc, "using result of function returning %<void%>");
3412 ret = GS_ERROR;
3415 /* Try this again in case gimplification exposed something. */
3416 if (ret != GS_ERROR)
3418 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3420 if (new_tree && new_tree != *expr_p)
3422 /* There was a transformation of this call which computes the
3423 same value, but in a more efficient way. Return and try
3424 again. */
3425 *expr_p = new_tree;
3426 return GS_OK;
3429 else
3431 *expr_p = error_mark_node;
3432 return GS_ERROR;
3435 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3436 decl. This allows us to eliminate redundant or useless
3437 calls to "const" functions. */
3438 if (TREE_CODE (*expr_p) == CALL_EXPR)
3440 int flags = call_expr_flags (*expr_p);
3441 if (flags & (ECF_CONST | ECF_PURE)
3442 /* An infinite loop is considered a side effect. */
3443 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3444 TREE_SIDE_EFFECTS (*expr_p) = 0;
3447 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3448 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3449 form and delegate the creation of a GIMPLE_CALL to
3450 gimplify_modify_expr. This is always possible because when
3451 WANT_VALUE is true, the caller wants the result of this call into
3452 a temporary, which means that we will emit an INIT_EXPR in
3453 internal_get_tmp_var which will then be handled by
3454 gimplify_modify_expr. */
3455 if (!want_value)
3457 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3458 have to do is replicate it as a GIMPLE_CALL tuple. */
3459 gimple_stmt_iterator gsi;
3460 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3461 notice_special_calls (call);
3462 gimplify_seq_add_stmt (pre_p, call);
3463 gsi = gsi_last (*pre_p);
3464 maybe_fold_stmt (&gsi);
3465 *expr_p = NULL_TREE;
3467 else
3468 /* Remember the original function type. */
3469 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3470 CALL_EXPR_FN (*expr_p));
3472 return ret;
3475 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3476 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3478 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3479 condition is true or false, respectively. If null, we should generate
3480 our own to skip over the evaluation of this specific expression.
3482 LOCUS is the source location of the COND_EXPR.
3484 This function is the tree equivalent of do_jump.
3486 shortcut_cond_r should only be called by shortcut_cond_expr. */
3488 static tree
3489 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3490 location_t locus)
3492 tree local_label = NULL_TREE;
3493 tree t, expr = NULL;
3495 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3496 retain the shortcut semantics. Just insert the gotos here;
3497 shortcut_cond_expr will append the real blocks later. */
3498 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3500 location_t new_locus;
3502 /* Turn if (a && b) into
3504 if (a); else goto no;
3505 if (b) goto yes; else goto no;
3506 (no:) */
3508 if (false_label_p == NULL)
3509 false_label_p = &local_label;
3511 /* Keep the original source location on the first 'if'. */
3512 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3513 append_to_statement_list (t, &expr);
3515 /* Set the source location of the && on the second 'if'. */
3516 new_locus = rexpr_location (pred, locus);
3517 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3518 new_locus);
3519 append_to_statement_list (t, &expr);
3521 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3523 location_t new_locus;
3525 /* Turn if (a || b) into
3527 if (a) goto yes;
3528 if (b) goto yes; else goto no;
3529 (yes:) */
3531 if (true_label_p == NULL)
3532 true_label_p = &local_label;
3534 /* Keep the original source location on the first 'if'. */
3535 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3536 append_to_statement_list (t, &expr);
3538 /* Set the source location of the || on the second 'if'. */
3539 new_locus = rexpr_location (pred, locus);
3540 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3541 new_locus);
3542 append_to_statement_list (t, &expr);
3544 else if (TREE_CODE (pred) == COND_EXPR
3545 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3546 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3548 location_t new_locus;
3550 /* As long as we're messing with gotos, turn if (a ? b : c) into
3551 if (a)
3552 if (b) goto yes; else goto no;
3553 else
3554 if (c) goto yes; else goto no;
3556 Don't do this if one of the arms has void type, which can happen
3557 in C++ when the arm is throw. */
3559 /* Keep the original source location on the first 'if'. Set the source
3560 location of the ? on the second 'if'. */
3561 new_locus = rexpr_location (pred, locus);
3562 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3563 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3564 false_label_p, locus),
3565 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3566 false_label_p, new_locus));
3568 else
3570 expr = build3 (COND_EXPR, void_type_node, pred,
3571 build_and_jump (true_label_p),
3572 build_and_jump (false_label_p));
3573 SET_EXPR_LOCATION (expr, locus);
3576 if (local_label)
3578 t = build1 (LABEL_EXPR, void_type_node, local_label);
3579 append_to_statement_list (t, &expr);
3582 return expr;
3585 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3586 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3587 statement, if it is the last one. Otherwise, return NULL. */
3589 static tree
3590 find_goto (tree expr)
3592 if (!expr)
3593 return NULL_TREE;
3595 if (TREE_CODE (expr) == GOTO_EXPR)
3596 return expr;
3598 if (TREE_CODE (expr) != STATEMENT_LIST)
3599 return NULL_TREE;
3601 tree_stmt_iterator i = tsi_start (expr);
3603 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3604 tsi_next (&i);
3606 if (!tsi_one_before_end_p (i))
3607 return NULL_TREE;
3609 return find_goto (tsi_stmt (i));
3612 /* Same as find_goto, except that it returns NULL if the destination
3613 is not a LABEL_DECL. */
3615 static inline tree
3616 find_goto_label (tree expr)
3618 tree dest = find_goto (expr);
3619 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3620 return dest;
3621 return NULL_TREE;
3624 /* Given a conditional expression EXPR with short-circuit boolean
3625 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3626 predicate apart into the equivalent sequence of conditionals. */
3628 static tree
3629 shortcut_cond_expr (tree expr)
3631 tree pred = TREE_OPERAND (expr, 0);
3632 tree then_ = TREE_OPERAND (expr, 1);
3633 tree else_ = TREE_OPERAND (expr, 2);
3634 tree true_label, false_label, end_label, t;
3635 tree *true_label_p;
3636 tree *false_label_p;
3637 bool emit_end, emit_false, jump_over_else;
3638 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3639 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3641 /* First do simple transformations. */
3642 if (!else_se)
3644 /* If there is no 'else', turn
3645 if (a && b) then c
3646 into
3647 if (a) if (b) then c. */
3648 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3650 /* Keep the original source location on the first 'if'. */
3651 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3652 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3653 /* Set the source location of the && on the second 'if'. */
3654 if (rexpr_has_location (pred))
3655 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3656 then_ = shortcut_cond_expr (expr);
3657 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3658 pred = TREE_OPERAND (pred, 0);
3659 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3660 SET_EXPR_LOCATION (expr, locus);
3664 if (!then_se)
3666 /* If there is no 'then', turn
3667 if (a || b); else d
3668 into
3669 if (a); else if (b); else d. */
3670 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3672 /* Keep the original source location on the first 'if'. */
3673 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3674 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3675 /* Set the source location of the || on the second 'if'. */
3676 if (rexpr_has_location (pred))
3677 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3678 else_ = shortcut_cond_expr (expr);
3679 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3680 pred = TREE_OPERAND (pred, 0);
3681 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3682 SET_EXPR_LOCATION (expr, locus);
3686 /* If we're done, great. */
3687 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3688 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3689 return expr;
3691 /* Otherwise we need to mess with gotos. Change
3692 if (a) c; else d;
3694 if (a); else goto no;
3695 c; goto end;
3696 no: d; end:
3697 and recursively gimplify the condition. */
3699 true_label = false_label = end_label = NULL_TREE;
3701 /* If our arms just jump somewhere, hijack those labels so we don't
3702 generate jumps to jumps. */
3704 if (tree then_goto = find_goto_label (then_))
3706 true_label = GOTO_DESTINATION (then_goto);
3707 then_ = NULL;
3708 then_se = false;
3711 if (tree else_goto = find_goto_label (else_))
3713 false_label = GOTO_DESTINATION (else_goto);
3714 else_ = NULL;
3715 else_se = false;
3718 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3719 if (true_label)
3720 true_label_p = &true_label;
3721 else
3722 true_label_p = NULL;
3724 /* The 'else' branch also needs a label if it contains interesting code. */
3725 if (false_label || else_se)
3726 false_label_p = &false_label;
3727 else
3728 false_label_p = NULL;
3730 /* If there was nothing else in our arms, just forward the label(s). */
3731 if (!then_se && !else_se)
3732 return shortcut_cond_r (pred, true_label_p, false_label_p,
3733 EXPR_LOC_OR_LOC (expr, input_location));
3735 /* If our last subexpression already has a terminal label, reuse it. */
3736 if (else_se)
3737 t = expr_last (else_);
3738 else if (then_se)
3739 t = expr_last (then_);
3740 else
3741 t = NULL;
3742 if (t && TREE_CODE (t) == LABEL_EXPR)
3743 end_label = LABEL_EXPR_LABEL (t);
3745 /* If we don't care about jumping to the 'else' branch, jump to the end
3746 if the condition is false. */
3747 if (!false_label_p)
3748 false_label_p = &end_label;
3750 /* We only want to emit these labels if we aren't hijacking them. */
3751 emit_end = (end_label == NULL_TREE);
3752 emit_false = (false_label == NULL_TREE);
3754 /* We only emit the jump over the else clause if we have to--if the
3755 then clause may fall through. Otherwise we can wind up with a
3756 useless jump and a useless label at the end of gimplified code,
3757 which will cause us to think that this conditional as a whole
3758 falls through even if it doesn't. If we then inline a function
3759 which ends with such a condition, that can cause us to issue an
3760 inappropriate warning about control reaching the end of a
3761 non-void function. */
3762 jump_over_else = block_may_fallthru (then_);
3764 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3765 EXPR_LOC_OR_LOC (expr, input_location));
3767 expr = NULL;
3768 append_to_statement_list (pred, &expr);
3770 append_to_statement_list (then_, &expr);
3771 if (else_se)
3773 if (jump_over_else)
3775 tree last = expr_last (expr);
3776 t = build_and_jump (&end_label);
3777 if (rexpr_has_location (last))
3778 SET_EXPR_LOCATION (t, rexpr_location (last));
3779 append_to_statement_list (t, &expr);
3781 if (emit_false)
3783 t = build1 (LABEL_EXPR, void_type_node, false_label);
3784 append_to_statement_list (t, &expr);
3786 append_to_statement_list (else_, &expr);
3788 if (emit_end && end_label)
3790 t = build1 (LABEL_EXPR, void_type_node, end_label);
3791 append_to_statement_list (t, &expr);
3794 return expr;
3797 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3799 tree
3800 gimple_boolify (tree expr)
3802 tree type = TREE_TYPE (expr);
3803 location_t loc = EXPR_LOCATION (expr);
3805 if (TREE_CODE (expr) == NE_EXPR
3806 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3807 && integer_zerop (TREE_OPERAND (expr, 1)))
3809 tree call = TREE_OPERAND (expr, 0);
3810 tree fn = get_callee_fndecl (call);
3812 /* For __builtin_expect ((long) (x), y) recurse into x as well
3813 if x is truth_value_p. */
3814 if (fn
3815 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3816 && call_expr_nargs (call) == 2)
3818 tree arg = CALL_EXPR_ARG (call, 0);
3819 if (arg)
3821 if (TREE_CODE (arg) == NOP_EXPR
3822 && TREE_TYPE (arg) == TREE_TYPE (call))
3823 arg = TREE_OPERAND (arg, 0);
3824 if (truth_value_p (TREE_CODE (arg)))
3826 arg = gimple_boolify (arg);
3827 CALL_EXPR_ARG (call, 0)
3828 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3834 switch (TREE_CODE (expr))
3836 case TRUTH_AND_EXPR:
3837 case TRUTH_OR_EXPR:
3838 case TRUTH_XOR_EXPR:
3839 case TRUTH_ANDIF_EXPR:
3840 case TRUTH_ORIF_EXPR:
3841 /* Also boolify the arguments of truth exprs. */
3842 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3843 /* FALLTHRU */
3845 case TRUTH_NOT_EXPR:
3846 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3848 /* These expressions always produce boolean results. */
3849 if (TREE_CODE (type) != BOOLEAN_TYPE)
3850 TREE_TYPE (expr) = boolean_type_node;
3851 return expr;
3853 case ANNOTATE_EXPR:
3854 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3856 case annot_expr_ivdep_kind:
3857 case annot_expr_unroll_kind:
3858 case annot_expr_no_vector_kind:
3859 case annot_expr_vector_kind:
3860 case annot_expr_parallel_kind:
3861 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3862 if (TREE_CODE (type) != BOOLEAN_TYPE)
3863 TREE_TYPE (expr) = boolean_type_node;
3864 return expr;
3865 default:
3866 gcc_unreachable ();
3869 default:
3870 if (COMPARISON_CLASS_P (expr))
3872 /* There expressions always prduce boolean results. */
3873 if (TREE_CODE (type) != BOOLEAN_TYPE)
3874 TREE_TYPE (expr) = boolean_type_node;
3875 return expr;
3877 /* Other expressions that get here must have boolean values, but
3878 might need to be converted to the appropriate mode. */
3879 if (TREE_CODE (type) == BOOLEAN_TYPE)
3880 return expr;
3881 return fold_convert_loc (loc, boolean_type_node, expr);
3885 /* Given a conditional expression *EXPR_P without side effects, gimplify
3886 its operands. New statements are inserted to PRE_P. */
3888 static enum gimplify_status
3889 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3891 tree expr = *expr_p, cond;
3892 enum gimplify_status ret, tret;
3893 enum tree_code code;
3895 cond = gimple_boolify (COND_EXPR_COND (expr));
3897 /* We need to handle && and || specially, as their gimplification
3898 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3899 code = TREE_CODE (cond);
3900 if (code == TRUTH_ANDIF_EXPR)
3901 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3902 else if (code == TRUTH_ORIF_EXPR)
3903 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3904 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3905 COND_EXPR_COND (*expr_p) = cond;
3907 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3908 is_gimple_val, fb_rvalue);
3909 ret = MIN (ret, tret);
3910 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3911 is_gimple_val, fb_rvalue);
3913 return MIN (ret, tret);
3916 /* Return true if evaluating EXPR could trap.
3917 EXPR is GENERIC, while tree_could_trap_p can be called
3918 only on GIMPLE. */
3920 bool
3921 generic_expr_could_trap_p (tree expr)
3923 unsigned i, n;
3925 if (!expr || is_gimple_val (expr))
3926 return false;
3928 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3929 return true;
3931 n = TREE_OPERAND_LENGTH (expr);
3932 for (i = 0; i < n; i++)
3933 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3934 return true;
3936 return false;
3939 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3940 into
3942 if (p) if (p)
3943 t1 = a; a;
3944 else or else
3945 t1 = b; b;
3948 The second form is used when *EXPR_P is of type void.
3950 PRE_P points to the list where side effects that must happen before
3951 *EXPR_P should be stored. */
3953 static enum gimplify_status
3954 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3956 tree expr = *expr_p;
3957 tree type = TREE_TYPE (expr);
3958 location_t loc = EXPR_LOCATION (expr);
3959 tree tmp, arm1, arm2;
3960 enum gimplify_status ret;
3961 tree label_true, label_false, label_cont;
3962 bool have_then_clause_p, have_else_clause_p;
3963 gcond *cond_stmt;
3964 enum tree_code pred_code;
3965 gimple_seq seq = NULL;
3967 /* If this COND_EXPR has a value, copy the values into a temporary within
3968 the arms. */
3969 if (!VOID_TYPE_P (type))
3971 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3972 tree result;
3974 /* If either an rvalue is ok or we do not require an lvalue, create the
3975 temporary. But we cannot do that if the type is addressable. */
3976 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3977 && !TREE_ADDRESSABLE (type))
3979 if (gimplify_ctxp->allow_rhs_cond_expr
3980 /* If either branch has side effects or could trap, it can't be
3981 evaluated unconditionally. */
3982 && !TREE_SIDE_EFFECTS (then_)
3983 && !generic_expr_could_trap_p (then_)
3984 && !TREE_SIDE_EFFECTS (else_)
3985 && !generic_expr_could_trap_p (else_))
3986 return gimplify_pure_cond_expr (expr_p, pre_p);
3988 tmp = create_tmp_var (type, "iftmp");
3989 result = tmp;
3992 /* Otherwise, only create and copy references to the values. */
3993 else
3995 type = build_pointer_type (type);
3997 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3998 then_ = build_fold_addr_expr_loc (loc, then_);
4000 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4001 else_ = build_fold_addr_expr_loc (loc, else_);
4003 expr
4004 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4006 tmp = create_tmp_var (type, "iftmp");
4007 result = build_simple_mem_ref_loc (loc, tmp);
4010 /* Build the new then clause, `tmp = then_;'. But don't build the
4011 assignment if the value is void; in C++ it can be if it's a throw. */
4012 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4013 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
4015 /* Similarly, build the new else clause, `tmp = else_;'. */
4016 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4017 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
4019 TREE_TYPE (expr) = void_type_node;
4020 recalculate_side_effects (expr);
4022 /* Move the COND_EXPR to the prequeue. */
4023 gimplify_stmt (&expr, pre_p);
4025 *expr_p = result;
4026 return GS_ALL_DONE;
4029 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4030 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4031 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4032 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4034 /* Make sure the condition has BOOLEAN_TYPE. */
4035 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4037 /* Break apart && and || conditions. */
4038 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4039 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4041 expr = shortcut_cond_expr (expr);
4043 if (expr != *expr_p)
4045 *expr_p = expr;
4047 /* We can't rely on gimplify_expr to re-gimplify the expanded
4048 form properly, as cleanups might cause the target labels to be
4049 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4050 set up a conditional context. */
4051 gimple_push_condition ();
4052 gimplify_stmt (expr_p, &seq);
4053 gimple_pop_condition (pre_p);
4054 gimple_seq_add_seq (pre_p, seq);
4056 return GS_ALL_DONE;
4060 /* Now do the normal gimplification. */
4062 /* Gimplify condition. */
4063 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
4064 fb_rvalue);
4065 if (ret == GS_ERROR)
4066 return GS_ERROR;
4067 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4069 gimple_push_condition ();
4071 have_then_clause_p = have_else_clause_p = false;
4072 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4073 if (label_true
4074 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4075 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4076 have different locations, otherwise we end up with incorrect
4077 location information on the branches. */
4078 && (optimize
4079 || !EXPR_HAS_LOCATION (expr)
4080 || !rexpr_has_location (label_true)
4081 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4083 have_then_clause_p = true;
4084 label_true = GOTO_DESTINATION (label_true);
4086 else
4087 label_true = create_artificial_label (UNKNOWN_LOCATION);
4088 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4089 if (label_false
4090 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4091 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4092 have different locations, otherwise we end up with incorrect
4093 location information on the branches. */
4094 && (optimize
4095 || !EXPR_HAS_LOCATION (expr)
4096 || !rexpr_has_location (label_false)
4097 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4099 have_else_clause_p = true;
4100 label_false = GOTO_DESTINATION (label_false);
4102 else
4103 label_false = create_artificial_label (UNKNOWN_LOCATION);
4105 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4106 &arm2);
4107 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4108 label_false);
4109 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4110 gimplify_seq_add_stmt (&seq, cond_stmt);
4111 gimple_stmt_iterator gsi = gsi_last (seq);
4112 maybe_fold_stmt (&gsi);
4114 label_cont = NULL_TREE;
4115 if (!have_then_clause_p)
4117 /* For if (...) {} else { code; } put label_true after
4118 the else block. */
4119 if (TREE_OPERAND (expr, 1) == NULL_TREE
4120 && !have_else_clause_p
4121 && TREE_OPERAND (expr, 2) != NULL_TREE)
4122 label_cont = label_true;
4123 else
4125 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4126 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4127 /* For if (...) { code; } else {} or
4128 if (...) { code; } else goto label; or
4129 if (...) { code; return; } else { ... }
4130 label_cont isn't needed. */
4131 if (!have_else_clause_p
4132 && TREE_OPERAND (expr, 2) != NULL_TREE
4133 && gimple_seq_may_fallthru (seq))
4135 gimple *g;
4136 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4138 g = gimple_build_goto (label_cont);
4140 /* GIMPLE_COND's are very low level; they have embedded
4141 gotos. This particular embedded goto should not be marked
4142 with the location of the original COND_EXPR, as it would
4143 correspond to the COND_EXPR's condition, not the ELSE or the
4144 THEN arms. To avoid marking it with the wrong location, flag
4145 it as "no location". */
4146 gimple_set_do_not_emit_location (g);
4148 gimplify_seq_add_stmt (&seq, g);
4152 if (!have_else_clause_p)
4154 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4155 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4157 if (label_cont)
4158 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4160 gimple_pop_condition (pre_p);
4161 gimple_seq_add_seq (pre_p, seq);
4163 if (ret == GS_ERROR)
4164 ; /* Do nothing. */
4165 else if (have_then_clause_p || have_else_clause_p)
4166 ret = GS_ALL_DONE;
4167 else
4169 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4170 expr = TREE_OPERAND (expr, 0);
4171 gimplify_stmt (&expr, pre_p);
4174 *expr_p = NULL;
4175 return ret;
4178 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4179 to be marked addressable.
4181 We cannot rely on such an expression being directly markable if a temporary
4182 has been created by the gimplification. In this case, we create another
4183 temporary and initialize it with a copy, which will become a store after we
4184 mark it addressable. This can happen if the front-end passed us something
4185 that it could not mark addressable yet, like a Fortran pass-by-reference
4186 parameter (int) floatvar. */
4188 static void
4189 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4191 while (handled_component_p (*expr_p))
4192 expr_p = &TREE_OPERAND (*expr_p, 0);
4193 if (is_gimple_reg (*expr_p))
4195 /* Do not allow an SSA name as the temporary. */
4196 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4197 DECL_GIMPLE_REG_P (var) = 0;
4198 *expr_p = var;
4202 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4203 a call to __builtin_memcpy. */
4205 static enum gimplify_status
4206 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4207 gimple_seq *seq_p)
4209 tree t, to, to_ptr, from, from_ptr;
4210 gcall *gs;
4211 location_t loc = EXPR_LOCATION (*expr_p);
4213 to = TREE_OPERAND (*expr_p, 0);
4214 from = TREE_OPERAND (*expr_p, 1);
4216 /* Mark the RHS addressable. Beware that it may not be possible to do so
4217 directly if a temporary has been created by the gimplification. */
4218 prepare_gimple_addressable (&from, seq_p);
4220 mark_addressable (from);
4221 from_ptr = build_fold_addr_expr_loc (loc, from);
4222 gimplify_arg (&from_ptr, seq_p, loc);
4224 mark_addressable (to);
4225 to_ptr = build_fold_addr_expr_loc (loc, to);
4226 gimplify_arg (&to_ptr, seq_p, loc);
4228 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4230 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4232 if (want_value)
4234 /* tmp = memcpy() */
4235 t = create_tmp_var (TREE_TYPE (to_ptr));
4236 gimple_call_set_lhs (gs, t);
4237 gimplify_seq_add_stmt (seq_p, gs);
4239 *expr_p = build_simple_mem_ref (t);
4240 return GS_ALL_DONE;
4243 gimplify_seq_add_stmt (seq_p, gs);
4244 *expr_p = NULL;
4245 return GS_ALL_DONE;
4248 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4249 a call to __builtin_memset. In this case we know that the RHS is
4250 a CONSTRUCTOR with an empty element list. */
4252 static enum gimplify_status
4253 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4254 gimple_seq *seq_p)
4256 tree t, from, to, to_ptr;
4257 gcall *gs;
4258 location_t loc = EXPR_LOCATION (*expr_p);
4260 /* Assert our assumptions, to abort instead of producing wrong code
4261 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4262 not be immediately exposed. */
4263 from = TREE_OPERAND (*expr_p, 1);
4264 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4265 from = TREE_OPERAND (from, 0);
4267 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4268 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4270 /* Now proceed. */
4271 to = TREE_OPERAND (*expr_p, 0);
4273 to_ptr = build_fold_addr_expr_loc (loc, to);
4274 gimplify_arg (&to_ptr, seq_p, loc);
4275 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4277 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4279 if (want_value)
4281 /* tmp = memset() */
4282 t = create_tmp_var (TREE_TYPE (to_ptr));
4283 gimple_call_set_lhs (gs, t);
4284 gimplify_seq_add_stmt (seq_p, gs);
4286 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4287 return GS_ALL_DONE;
4290 gimplify_seq_add_stmt (seq_p, gs);
4291 *expr_p = NULL;
4292 return GS_ALL_DONE;
4295 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4296 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4297 assignment. Return non-null if we detect a potential overlap. */
4299 struct gimplify_init_ctor_preeval_data
4301 /* The base decl of the lhs object. May be NULL, in which case we
4302 have to assume the lhs is indirect. */
4303 tree lhs_base_decl;
4305 /* The alias set of the lhs object. */
4306 alias_set_type lhs_alias_set;
4309 static tree
4310 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4312 struct gimplify_init_ctor_preeval_data *data
4313 = (struct gimplify_init_ctor_preeval_data *) xdata;
4314 tree t = *tp;
4316 /* If we find the base object, obviously we have overlap. */
4317 if (data->lhs_base_decl == t)
4318 return t;
4320 /* If the constructor component is indirect, determine if we have a
4321 potential overlap with the lhs. The only bits of information we
4322 have to go on at this point are addressability and alias sets. */
4323 if ((INDIRECT_REF_P (t)
4324 || TREE_CODE (t) == MEM_REF)
4325 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4326 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4327 return t;
4329 /* If the constructor component is a call, determine if it can hide a
4330 potential overlap with the lhs through an INDIRECT_REF like above.
4331 ??? Ugh - this is completely broken. In fact this whole analysis
4332 doesn't look conservative. */
4333 if (TREE_CODE (t) == CALL_EXPR)
4335 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4337 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4338 if (POINTER_TYPE_P (TREE_VALUE (type))
4339 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4340 && alias_sets_conflict_p (data->lhs_alias_set,
4341 get_alias_set
4342 (TREE_TYPE (TREE_VALUE (type)))))
4343 return t;
4346 if (IS_TYPE_OR_DECL_P (t))
4347 *walk_subtrees = 0;
4348 return NULL;
4351 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4352 force values that overlap with the lhs (as described by *DATA)
4353 into temporaries. */
4355 static void
4356 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4357 struct gimplify_init_ctor_preeval_data *data)
4359 enum gimplify_status one;
4361 /* If the value is constant, then there's nothing to pre-evaluate. */
4362 if (TREE_CONSTANT (*expr_p))
4364 /* Ensure it does not have side effects, it might contain a reference to
4365 the object we're initializing. */
4366 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4367 return;
4370 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4371 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4372 return;
4374 /* Recurse for nested constructors. */
4375 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4377 unsigned HOST_WIDE_INT ix;
4378 constructor_elt *ce;
4379 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4381 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4382 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4384 return;
4387 /* If this is a variable sized type, we must remember the size. */
4388 maybe_with_size_expr (expr_p);
4390 /* Gimplify the constructor element to something appropriate for the rhs
4391 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4392 the gimplifier will consider this a store to memory. Doing this
4393 gimplification now means that we won't have to deal with complicated
4394 language-specific trees, nor trees like SAVE_EXPR that can induce
4395 exponential search behavior. */
4396 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4397 if (one == GS_ERROR)
4399 *expr_p = NULL;
4400 return;
4403 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4404 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4405 always be true for all scalars, since is_gimple_mem_rhs insists on a
4406 temporary variable for them. */
4407 if (DECL_P (*expr_p))
4408 return;
4410 /* If this is of variable size, we have no choice but to assume it doesn't
4411 overlap since we can't make a temporary for it. */
4412 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4413 return;
4415 /* Otherwise, we must search for overlap ... */
4416 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4417 return;
4419 /* ... and if found, force the value into a temporary. */
4420 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4423 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4424 a RANGE_EXPR in a CONSTRUCTOR for an array.
4426 var = lower;
4427 loop_entry:
4428 object[var] = value;
4429 if (var == upper)
4430 goto loop_exit;
4431 var = var + 1;
4432 goto loop_entry;
4433 loop_exit:
4435 We increment var _after_ the loop exit check because we might otherwise
4436 fail if upper == TYPE_MAX_VALUE (type for upper).
4438 Note that we never have to deal with SAVE_EXPRs here, because this has
4439 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4441 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4442 gimple_seq *, bool);
4444 static void
4445 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4446 tree value, tree array_elt_type,
4447 gimple_seq *pre_p, bool cleared)
4449 tree loop_entry_label, loop_exit_label, fall_thru_label;
4450 tree var, var_type, cref, tmp;
4452 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4453 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4454 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4456 /* Create and initialize the index variable. */
4457 var_type = TREE_TYPE (upper);
4458 var = create_tmp_var (var_type);
4459 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4461 /* Add the loop entry label. */
4462 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4464 /* Build the reference. */
4465 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4466 var, NULL_TREE, NULL_TREE);
4468 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4469 the store. Otherwise just assign value to the reference. */
4471 if (TREE_CODE (value) == CONSTRUCTOR)
4472 /* NB we might have to call ourself recursively through
4473 gimplify_init_ctor_eval if the value is a constructor. */
4474 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4475 pre_p, cleared);
4476 else
4477 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4479 /* We exit the loop when the index var is equal to the upper bound. */
4480 gimplify_seq_add_stmt (pre_p,
4481 gimple_build_cond (EQ_EXPR, var, upper,
4482 loop_exit_label, fall_thru_label));
4484 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4486 /* Otherwise, increment the index var... */
4487 tmp = build2 (PLUS_EXPR, var_type, var,
4488 fold_convert (var_type, integer_one_node));
4489 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4491 /* ...and jump back to the loop entry. */
4492 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4494 /* Add the loop exit label. */
4495 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4498 /* Return true if FDECL is accessing a field that is zero sized. */
4500 static bool
4501 zero_sized_field_decl (const_tree fdecl)
4503 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4504 && integer_zerop (DECL_SIZE (fdecl)))
4505 return true;
4506 return false;
4509 /* Return true if TYPE is zero sized. */
4511 static bool
4512 zero_sized_type (const_tree type)
4514 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4515 && integer_zerop (TYPE_SIZE (type)))
4516 return true;
4517 return false;
4520 /* A subroutine of gimplify_init_constructor. Generate individual
4521 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4522 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4523 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4524 zeroed first. */
4526 static void
4527 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4528 gimple_seq *pre_p, bool cleared)
4530 tree array_elt_type = NULL;
4531 unsigned HOST_WIDE_INT ix;
4532 tree purpose, value;
4534 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4535 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4537 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4539 tree cref;
4541 /* NULL values are created above for gimplification errors. */
4542 if (value == NULL)
4543 continue;
4545 if (cleared && initializer_zerop (value))
4546 continue;
4548 /* ??? Here's to hoping the front end fills in all of the indices,
4549 so we don't have to figure out what's missing ourselves. */
4550 gcc_assert (purpose);
4552 /* Skip zero-sized fields, unless value has side-effects. This can
4553 happen with calls to functions returning a zero-sized type, which
4554 we shouldn't discard. As a number of downstream passes don't
4555 expect sets of zero-sized fields, we rely on the gimplification of
4556 the MODIFY_EXPR we make below to drop the assignment statement. */
4557 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4558 continue;
4560 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4561 whole range. */
4562 if (TREE_CODE (purpose) == RANGE_EXPR)
4564 tree lower = TREE_OPERAND (purpose, 0);
4565 tree upper = TREE_OPERAND (purpose, 1);
4567 /* If the lower bound is equal to upper, just treat it as if
4568 upper was the index. */
4569 if (simple_cst_equal (lower, upper))
4570 purpose = upper;
4571 else
4573 gimplify_init_ctor_eval_range (object, lower, upper, value,
4574 array_elt_type, pre_p, cleared);
4575 continue;
4579 if (array_elt_type)
4581 /* Do not use bitsizetype for ARRAY_REF indices. */
4582 if (TYPE_DOMAIN (TREE_TYPE (object)))
4583 purpose
4584 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4585 purpose);
4586 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4587 purpose, NULL_TREE, NULL_TREE);
4589 else
4591 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4592 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4593 unshare_expr (object), purpose, NULL_TREE);
4596 if (TREE_CODE (value) == CONSTRUCTOR
4597 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4598 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4599 pre_p, cleared);
4600 else
4602 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4603 gimplify_and_add (init, pre_p);
4604 ggc_free (init);
4609 /* Return the appropriate RHS predicate for this LHS. */
4611 gimple_predicate
4612 rhs_predicate_for (tree lhs)
4614 if (is_gimple_reg (lhs))
4615 return is_gimple_reg_rhs_or_call;
4616 else
4617 return is_gimple_mem_rhs_or_call;
4620 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4621 before the LHS has been gimplified. */
4623 static gimple_predicate
4624 initial_rhs_predicate_for (tree lhs)
4626 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4627 return is_gimple_reg_rhs_or_call;
4628 else
4629 return is_gimple_mem_rhs_or_call;
4632 /* Gimplify a C99 compound literal expression. This just means adding
4633 the DECL_EXPR before the current statement and using its anonymous
4634 decl instead. */
4636 static enum gimplify_status
4637 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4638 bool (*gimple_test_f) (tree),
4639 fallback_t fallback)
4641 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4642 tree decl = DECL_EXPR_DECL (decl_s);
4643 tree init = DECL_INITIAL (decl);
4644 /* Mark the decl as addressable if the compound literal
4645 expression is addressable now, otherwise it is marked too late
4646 after we gimplify the initialization expression. */
4647 if (TREE_ADDRESSABLE (*expr_p))
4648 TREE_ADDRESSABLE (decl) = 1;
4649 /* Otherwise, if we don't need an lvalue and have a literal directly
4650 substitute it. Check if it matches the gimple predicate, as
4651 otherwise we'd generate a new temporary, and we can as well just
4652 use the decl we already have. */
4653 else if (!TREE_ADDRESSABLE (decl)
4654 && init
4655 && (fallback & fb_lvalue) == 0
4656 && gimple_test_f (init))
4658 *expr_p = init;
4659 return GS_OK;
4662 /* Preliminarily mark non-addressed complex variables as eligible
4663 for promotion to gimple registers. We'll transform their uses
4664 as we find them. */
4665 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4666 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4667 && !TREE_THIS_VOLATILE (decl)
4668 && !needs_to_live_in_memory (decl))
4669 DECL_GIMPLE_REG_P (decl) = 1;
4671 /* If the decl is not addressable, then it is being used in some
4672 expression or on the right hand side of a statement, and it can
4673 be put into a readonly data section. */
4674 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4675 TREE_READONLY (decl) = 1;
4677 /* This decl isn't mentioned in the enclosing block, so add it to the
4678 list of temps. FIXME it seems a bit of a kludge to say that
4679 anonymous artificial vars aren't pushed, but everything else is. */
4680 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4681 gimple_add_tmp_var (decl);
4683 gimplify_and_add (decl_s, pre_p);
4684 *expr_p = decl;
4685 return GS_OK;
4688 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4689 return a new CONSTRUCTOR if something changed. */
4691 static tree
4692 optimize_compound_literals_in_ctor (tree orig_ctor)
4694 tree ctor = orig_ctor;
4695 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4696 unsigned int idx, num = vec_safe_length (elts);
4698 for (idx = 0; idx < num; idx++)
4700 tree value = (*elts)[idx].value;
4701 tree newval = value;
4702 if (TREE_CODE (value) == CONSTRUCTOR)
4703 newval = optimize_compound_literals_in_ctor (value);
4704 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4706 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4707 tree decl = DECL_EXPR_DECL (decl_s);
4708 tree init = DECL_INITIAL (decl);
4710 if (!TREE_ADDRESSABLE (value)
4711 && !TREE_ADDRESSABLE (decl)
4712 && init
4713 && TREE_CODE (init) == CONSTRUCTOR)
4714 newval = optimize_compound_literals_in_ctor (init);
4716 if (newval == value)
4717 continue;
4719 if (ctor == orig_ctor)
4721 ctor = copy_node (orig_ctor);
4722 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4723 elts = CONSTRUCTOR_ELTS (ctor);
4725 (*elts)[idx].value = newval;
4727 return ctor;
4730 /* A subroutine of gimplify_modify_expr. Break out elements of a
4731 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4733 Note that we still need to clear any elements that don't have explicit
4734 initializers, so if not all elements are initialized we keep the
4735 original MODIFY_EXPR, we just remove all of the constructor elements.
4737 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4738 GS_ERROR if we would have to create a temporary when gimplifying
4739 this constructor. Otherwise, return GS_OK.
4741 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4743 static enum gimplify_status
4744 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4745 bool want_value, bool notify_temp_creation)
4747 tree object, ctor, type;
4748 enum gimplify_status ret;
4749 vec<constructor_elt, va_gc> *elts;
4751 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4753 if (!notify_temp_creation)
4755 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4756 is_gimple_lvalue, fb_lvalue);
4757 if (ret == GS_ERROR)
4758 return ret;
4761 object = TREE_OPERAND (*expr_p, 0);
4762 ctor = TREE_OPERAND (*expr_p, 1)
4763 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4764 type = TREE_TYPE (ctor);
4765 elts = CONSTRUCTOR_ELTS (ctor);
4766 ret = GS_ALL_DONE;
4768 switch (TREE_CODE (type))
4770 case RECORD_TYPE:
4771 case UNION_TYPE:
4772 case QUAL_UNION_TYPE:
4773 case ARRAY_TYPE:
4775 struct gimplify_init_ctor_preeval_data preeval_data;
4776 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4777 bool cleared, complete_p, valid_const_initializer;
4779 /* Aggregate types must lower constructors to initialization of
4780 individual elements. The exception is that a CONSTRUCTOR node
4781 with no elements indicates zero-initialization of the whole. */
4782 if (vec_safe_is_empty (elts))
4784 if (notify_temp_creation)
4785 return GS_OK;
4786 break;
4789 /* Fetch information about the constructor to direct later processing.
4790 We might want to make static versions of it in various cases, and
4791 can only do so if it known to be a valid constant initializer. */
4792 valid_const_initializer
4793 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4794 &num_ctor_elements, &complete_p);
4796 /* If a const aggregate variable is being initialized, then it
4797 should never be a lose to promote the variable to be static. */
4798 if (valid_const_initializer
4799 && num_nonzero_elements > 1
4800 && TREE_READONLY (object)
4801 && VAR_P (object)
4802 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4804 if (notify_temp_creation)
4805 return GS_ERROR;
4806 DECL_INITIAL (object) = ctor;
4807 TREE_STATIC (object) = 1;
4808 if (!DECL_NAME (object))
4809 DECL_NAME (object) = create_tmp_var_name ("C");
4810 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4812 /* ??? C++ doesn't automatically append a .<number> to the
4813 assembler name, and even when it does, it looks at FE private
4814 data structures to figure out what that number should be,
4815 which are not set for this variable. I suppose this is
4816 important for local statics for inline functions, which aren't
4817 "local" in the object file sense. So in order to get a unique
4818 TU-local symbol, we must invoke the lhd version now. */
4819 lhd_set_decl_assembler_name (object);
4821 *expr_p = NULL_TREE;
4822 break;
4825 /* If there are "lots" of initialized elements, even discounting
4826 those that are not address constants (and thus *must* be
4827 computed at runtime), then partition the constructor into
4828 constant and non-constant parts. Block copy the constant
4829 parts in, then generate code for the non-constant parts. */
4830 /* TODO. There's code in cp/typeck.c to do this. */
4832 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4833 /* store_constructor will ignore the clearing of variable-sized
4834 objects. Initializers for such objects must explicitly set
4835 every field that needs to be set. */
4836 cleared = false;
4837 else if (!complete_p)
4838 /* If the constructor isn't complete, clear the whole object
4839 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4841 ??? This ought not to be needed. For any element not present
4842 in the initializer, we should simply set them to zero. Except
4843 we'd need to *find* the elements that are not present, and that
4844 requires trickery to avoid quadratic compile-time behavior in
4845 large cases or excessive memory use in small cases. */
4846 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4847 else if (num_ctor_elements - num_nonzero_elements
4848 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4849 && num_nonzero_elements < num_ctor_elements / 4)
4850 /* If there are "lots" of zeros, it's more efficient to clear
4851 the memory and then set the nonzero elements. */
4852 cleared = true;
4853 else
4854 cleared = false;
4856 /* If there are "lots" of initialized elements, and all of them
4857 are valid address constants, then the entire initializer can
4858 be dropped to memory, and then memcpy'd out. Don't do this
4859 for sparse arrays, though, as it's more efficient to follow
4860 the standard CONSTRUCTOR behavior of memset followed by
4861 individual element initialization. Also don't do this for small
4862 all-zero initializers (which aren't big enough to merit
4863 clearing), and don't try to make bitwise copies of
4864 TREE_ADDRESSABLE types. */
4866 if (valid_const_initializer
4867 && !(cleared || num_nonzero_elements == 0)
4868 && !TREE_ADDRESSABLE (type))
4870 HOST_WIDE_INT size = int_size_in_bytes (type);
4871 unsigned int align;
4873 /* ??? We can still get unbounded array types, at least
4874 from the C++ front end. This seems wrong, but attempt
4875 to work around it for now. */
4876 if (size < 0)
4878 size = int_size_in_bytes (TREE_TYPE (object));
4879 if (size >= 0)
4880 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4883 /* Find the maximum alignment we can assume for the object. */
4884 /* ??? Make use of DECL_OFFSET_ALIGN. */
4885 if (DECL_P (object))
4886 align = DECL_ALIGN (object);
4887 else
4888 align = TYPE_ALIGN (type);
4890 /* Do a block move either if the size is so small as to make
4891 each individual move a sub-unit move on average, or if it
4892 is so large as to make individual moves inefficient. */
4893 if (size > 0
4894 && num_nonzero_elements > 1
4895 && (size < num_nonzero_elements
4896 || !can_move_by_pieces (size, align)))
4898 if (notify_temp_creation)
4899 return GS_ERROR;
4901 walk_tree (&ctor, force_labels_r, NULL, NULL);
4902 ctor = tree_output_constant_def (ctor);
4903 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4904 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4905 TREE_OPERAND (*expr_p, 1) = ctor;
4907 /* This is no longer an assignment of a CONSTRUCTOR, but
4908 we still may have processing to do on the LHS. So
4909 pretend we didn't do anything here to let that happen. */
4910 return GS_UNHANDLED;
4914 /* If the target is volatile, we have non-zero elements and more than
4915 one field to assign, initialize the target from a temporary. */
4916 if (TREE_THIS_VOLATILE (object)
4917 && !TREE_ADDRESSABLE (type)
4918 && num_nonzero_elements > 0
4919 && vec_safe_length (elts) > 1)
4921 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4922 TREE_OPERAND (*expr_p, 0) = temp;
4923 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4924 *expr_p,
4925 build2 (MODIFY_EXPR, void_type_node,
4926 object, temp));
4927 return GS_OK;
4930 if (notify_temp_creation)
4931 return GS_OK;
4933 /* If there are nonzero elements and if needed, pre-evaluate to capture
4934 elements overlapping with the lhs into temporaries. We must do this
4935 before clearing to fetch the values before they are zeroed-out. */
4936 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4938 preeval_data.lhs_base_decl = get_base_address (object);
4939 if (!DECL_P (preeval_data.lhs_base_decl))
4940 preeval_data.lhs_base_decl = NULL;
4941 preeval_data.lhs_alias_set = get_alias_set (object);
4943 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4944 pre_p, post_p, &preeval_data);
4947 bool ctor_has_side_effects_p
4948 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4950 if (cleared)
4952 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4953 Note that we still have to gimplify, in order to handle the
4954 case of variable sized types. Avoid shared tree structures. */
4955 CONSTRUCTOR_ELTS (ctor) = NULL;
4956 TREE_SIDE_EFFECTS (ctor) = 0;
4957 object = unshare_expr (object);
4958 gimplify_stmt (expr_p, pre_p);
4961 /* If we have not block cleared the object, or if there are nonzero
4962 elements in the constructor, or if the constructor has side effects,
4963 add assignments to the individual scalar fields of the object. */
4964 if (!cleared
4965 || num_nonzero_elements > 0
4966 || ctor_has_side_effects_p)
4967 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4969 *expr_p = NULL_TREE;
4971 break;
4973 case COMPLEX_TYPE:
4975 tree r, i;
4977 if (notify_temp_creation)
4978 return GS_OK;
4980 /* Extract the real and imaginary parts out of the ctor. */
4981 gcc_assert (elts->length () == 2);
4982 r = (*elts)[0].value;
4983 i = (*elts)[1].value;
4984 if (r == NULL || i == NULL)
4986 tree zero = build_zero_cst (TREE_TYPE (type));
4987 if (r == NULL)
4988 r = zero;
4989 if (i == NULL)
4990 i = zero;
4993 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4994 represent creation of a complex value. */
4995 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4997 ctor = build_complex (type, r, i);
4998 TREE_OPERAND (*expr_p, 1) = ctor;
5000 else
5002 ctor = build2 (COMPLEX_EXPR, type, r, i);
5003 TREE_OPERAND (*expr_p, 1) = ctor;
5004 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5005 pre_p,
5006 post_p,
5007 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5008 fb_rvalue);
5011 break;
5013 case VECTOR_TYPE:
5015 unsigned HOST_WIDE_INT ix;
5016 constructor_elt *ce;
5018 if (notify_temp_creation)
5019 return GS_OK;
5021 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5022 if (TREE_CONSTANT (ctor))
5024 bool constant_p = true;
5025 tree value;
5027 /* Even when ctor is constant, it might contain non-*_CST
5028 elements, such as addresses or trapping values like
5029 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5030 in VECTOR_CST nodes. */
5031 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5032 if (!CONSTANT_CLASS_P (value))
5034 constant_p = false;
5035 break;
5038 if (constant_p)
5040 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5041 break;
5044 TREE_CONSTANT (ctor) = 0;
5047 /* Vector types use CONSTRUCTOR all the way through gimple
5048 compilation as a general initializer. */
5049 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5051 enum gimplify_status tret;
5052 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5053 fb_rvalue);
5054 if (tret == GS_ERROR)
5055 ret = GS_ERROR;
5056 else if (TREE_STATIC (ctor)
5057 && !initializer_constant_valid_p (ce->value,
5058 TREE_TYPE (ce->value)))
5059 TREE_STATIC (ctor) = 0;
5061 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5062 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5064 break;
5066 default:
5067 /* So how did we get a CONSTRUCTOR for a scalar type? */
5068 gcc_unreachable ();
5071 if (ret == GS_ERROR)
5072 return GS_ERROR;
5073 /* If we have gimplified both sides of the initializer but have
5074 not emitted an assignment, do so now. */
5075 if (*expr_p)
5077 tree lhs = TREE_OPERAND (*expr_p, 0);
5078 tree rhs = TREE_OPERAND (*expr_p, 1);
5079 if (want_value && object == lhs)
5080 lhs = unshare_expr (lhs);
5081 gassign *init = gimple_build_assign (lhs, rhs);
5082 gimplify_seq_add_stmt (pre_p, init);
5084 if (want_value)
5086 *expr_p = object;
5087 return GS_OK;
5089 else
5091 *expr_p = NULL;
5092 return GS_ALL_DONE;
5096 /* Given a pointer value OP0, return a simplified version of an
5097 indirection through OP0, or NULL_TREE if no simplification is
5098 possible. This may only be applied to a rhs of an expression.
5099 Note that the resulting type may be different from the type pointed
5100 to in the sense that it is still compatible from the langhooks
5101 point of view. */
5103 static tree
5104 gimple_fold_indirect_ref_rhs (tree t)
5106 return gimple_fold_indirect_ref (t);
5109 /* Subroutine of gimplify_modify_expr to do simplifications of
5110 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5111 something changes. */
5113 static enum gimplify_status
5114 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5115 gimple_seq *pre_p, gimple_seq *post_p,
5116 bool want_value)
5118 enum gimplify_status ret = GS_UNHANDLED;
5119 bool changed;
5123 changed = false;
5124 switch (TREE_CODE (*from_p))
5126 case VAR_DECL:
5127 /* If we're assigning from a read-only variable initialized with
5128 a constructor, do the direct assignment from the constructor,
5129 but only if neither source nor target are volatile since this
5130 latter assignment might end up being done on a per-field basis. */
5131 if (DECL_INITIAL (*from_p)
5132 && TREE_READONLY (*from_p)
5133 && !TREE_THIS_VOLATILE (*from_p)
5134 && !TREE_THIS_VOLATILE (*to_p)
5135 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5137 tree old_from = *from_p;
5138 enum gimplify_status subret;
5140 /* Move the constructor into the RHS. */
5141 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5143 /* Let's see if gimplify_init_constructor will need to put
5144 it in memory. */
5145 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5146 false, true);
5147 if (subret == GS_ERROR)
5149 /* If so, revert the change. */
5150 *from_p = old_from;
5152 else
5154 ret = GS_OK;
5155 changed = true;
5158 break;
5159 case INDIRECT_REF:
5161 /* If we have code like
5163 *(const A*)(A*)&x
5165 where the type of "x" is a (possibly cv-qualified variant
5166 of "A"), treat the entire expression as identical to "x".
5167 This kind of code arises in C++ when an object is bound
5168 to a const reference, and if "x" is a TARGET_EXPR we want
5169 to take advantage of the optimization below. */
5170 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5171 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5172 if (t)
5174 if (TREE_THIS_VOLATILE (t) != volatile_p)
5176 if (DECL_P (t))
5177 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5178 build_fold_addr_expr (t));
5179 if (REFERENCE_CLASS_P (t))
5180 TREE_THIS_VOLATILE (t) = volatile_p;
5182 *from_p = t;
5183 ret = GS_OK;
5184 changed = true;
5186 break;
5189 case TARGET_EXPR:
5191 /* If we are initializing something from a TARGET_EXPR, strip the
5192 TARGET_EXPR and initialize it directly, if possible. This can't
5193 be done if the initializer is void, since that implies that the
5194 temporary is set in some non-trivial way.
5196 ??? What about code that pulls out the temp and uses it
5197 elsewhere? I think that such code never uses the TARGET_EXPR as
5198 an initializer. If I'm wrong, we'll die because the temp won't
5199 have any RTL. In that case, I guess we'll need to replace
5200 references somehow. */
5201 tree init = TARGET_EXPR_INITIAL (*from_p);
5203 if (init
5204 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5205 || !TARGET_EXPR_NO_ELIDE (*from_p))
5206 && !VOID_TYPE_P (TREE_TYPE (init)))
5208 *from_p = init;
5209 ret = GS_OK;
5210 changed = true;
5213 break;
5215 case COMPOUND_EXPR:
5216 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5217 caught. */
5218 gimplify_compound_expr (from_p, pre_p, true);
5219 ret = GS_OK;
5220 changed = true;
5221 break;
5223 case CONSTRUCTOR:
5224 /* If we already made some changes, let the front end have a
5225 crack at this before we break it down. */
5226 if (ret != GS_UNHANDLED)
5227 break;
5228 /* If we're initializing from a CONSTRUCTOR, break this into
5229 individual MODIFY_EXPRs. */
5230 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5231 false);
5233 case COND_EXPR:
5234 /* If we're assigning to a non-register type, push the assignment
5235 down into the branches. This is mandatory for ADDRESSABLE types,
5236 since we cannot generate temporaries for such, but it saves a
5237 copy in other cases as well. */
5238 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5240 /* This code should mirror the code in gimplify_cond_expr. */
5241 enum tree_code code = TREE_CODE (*expr_p);
5242 tree cond = *from_p;
5243 tree result = *to_p;
5245 ret = gimplify_expr (&result, pre_p, post_p,
5246 is_gimple_lvalue, fb_lvalue);
5247 if (ret != GS_ERROR)
5248 ret = GS_OK;
5250 /* If we are going to write RESULT more than once, clear
5251 TREE_READONLY flag, otherwise we might incorrectly promote
5252 the variable to static const and initialize it at compile
5253 time in one of the branches. */
5254 if (VAR_P (result)
5255 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5256 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5257 TREE_READONLY (result) = 0;
5258 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5259 TREE_OPERAND (cond, 1)
5260 = build2 (code, void_type_node, result,
5261 TREE_OPERAND (cond, 1));
5262 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5263 TREE_OPERAND (cond, 2)
5264 = build2 (code, void_type_node, unshare_expr (result),
5265 TREE_OPERAND (cond, 2));
5267 TREE_TYPE (cond) = void_type_node;
5268 recalculate_side_effects (cond);
5270 if (want_value)
5272 gimplify_and_add (cond, pre_p);
5273 *expr_p = unshare_expr (result);
5275 else
5276 *expr_p = cond;
5277 return ret;
5279 break;
5281 case CALL_EXPR:
5282 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5283 return slot so that we don't generate a temporary. */
5284 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5285 && aggregate_value_p (*from_p, *from_p))
5287 bool use_target;
5289 if (!(rhs_predicate_for (*to_p))(*from_p))
5290 /* If we need a temporary, *to_p isn't accurate. */
5291 use_target = false;
5292 /* It's OK to use the return slot directly unless it's an NRV. */
5293 else if (TREE_CODE (*to_p) == RESULT_DECL
5294 && DECL_NAME (*to_p) == NULL_TREE
5295 && needs_to_live_in_memory (*to_p))
5296 use_target = true;
5297 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5298 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5299 /* Don't force regs into memory. */
5300 use_target = false;
5301 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5302 /* It's OK to use the target directly if it's being
5303 initialized. */
5304 use_target = true;
5305 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5306 != INTEGER_CST)
5307 /* Always use the target and thus RSO for variable-sized types.
5308 GIMPLE cannot deal with a variable-sized assignment
5309 embedded in a call statement. */
5310 use_target = true;
5311 else if (TREE_CODE (*to_p) != SSA_NAME
5312 && (!is_gimple_variable (*to_p)
5313 || needs_to_live_in_memory (*to_p)))
5314 /* Don't use the original target if it's already addressable;
5315 if its address escapes, and the called function uses the
5316 NRV optimization, a conforming program could see *to_p
5317 change before the called function returns; see c++/19317.
5318 When optimizing, the return_slot pass marks more functions
5319 as safe after we have escape info. */
5320 use_target = false;
5321 else
5322 use_target = true;
5324 if (use_target)
5326 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5327 mark_addressable (*to_p);
5330 break;
5332 case WITH_SIZE_EXPR:
5333 /* Likewise for calls that return an aggregate of non-constant size,
5334 since we would not be able to generate a temporary at all. */
5335 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5337 *from_p = TREE_OPERAND (*from_p, 0);
5338 /* We don't change ret in this case because the
5339 WITH_SIZE_EXPR might have been added in
5340 gimplify_modify_expr, so returning GS_OK would lead to an
5341 infinite loop. */
5342 changed = true;
5344 break;
5346 /* If we're initializing from a container, push the initialization
5347 inside it. */
5348 case CLEANUP_POINT_EXPR:
5349 case BIND_EXPR:
5350 case STATEMENT_LIST:
5352 tree wrap = *from_p;
5353 tree t;
5355 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5356 fb_lvalue);
5357 if (ret != GS_ERROR)
5358 ret = GS_OK;
5360 t = voidify_wrapper_expr (wrap, *expr_p);
5361 gcc_assert (t == *expr_p);
5363 if (want_value)
5365 gimplify_and_add (wrap, pre_p);
5366 *expr_p = unshare_expr (*to_p);
5368 else
5369 *expr_p = wrap;
5370 return GS_OK;
5373 case COMPOUND_LITERAL_EXPR:
5375 tree complit = TREE_OPERAND (*expr_p, 1);
5376 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5377 tree decl = DECL_EXPR_DECL (decl_s);
5378 tree init = DECL_INITIAL (decl);
5380 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5381 into struct T x = { 0, 1, 2 } if the address of the
5382 compound literal has never been taken. */
5383 if (!TREE_ADDRESSABLE (complit)
5384 && !TREE_ADDRESSABLE (decl)
5385 && init)
5387 *expr_p = copy_node (*expr_p);
5388 TREE_OPERAND (*expr_p, 1) = init;
5389 return GS_OK;
5393 default:
5394 break;
5397 while (changed);
5399 return ret;
5403 /* Return true if T looks like a valid GIMPLE statement. */
5405 static bool
5406 is_gimple_stmt (tree t)
5408 const enum tree_code code = TREE_CODE (t);
5410 switch (code)
5412 case NOP_EXPR:
5413 /* The only valid NOP_EXPR is the empty statement. */
5414 return IS_EMPTY_STMT (t);
5416 case BIND_EXPR:
5417 case COND_EXPR:
5418 /* These are only valid if they're void. */
5419 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5421 case SWITCH_EXPR:
5422 case GOTO_EXPR:
5423 case RETURN_EXPR:
5424 case LABEL_EXPR:
5425 case CASE_LABEL_EXPR:
5426 case TRY_CATCH_EXPR:
5427 case TRY_FINALLY_EXPR:
5428 case EH_FILTER_EXPR:
5429 case CATCH_EXPR:
5430 case ASM_EXPR:
5431 case STATEMENT_LIST:
5432 case OACC_PARALLEL:
5433 case OACC_KERNELS:
5434 case OACC_DATA:
5435 case OACC_HOST_DATA:
5436 case OACC_DECLARE:
5437 case OACC_UPDATE:
5438 case OACC_ENTER_DATA:
5439 case OACC_EXIT_DATA:
5440 case OACC_CACHE:
5441 case OMP_PARALLEL:
5442 case OMP_FOR:
5443 case OMP_SIMD:
5444 case OMP_DISTRIBUTE:
5445 case OACC_LOOP:
5446 case OMP_SECTIONS:
5447 case OMP_SECTION:
5448 case OMP_SINGLE:
5449 case OMP_MASTER:
5450 case OMP_TASKGROUP:
5451 case OMP_ORDERED:
5452 case OMP_CRITICAL:
5453 case OMP_TASK:
5454 case OMP_TARGET:
5455 case OMP_TARGET_DATA:
5456 case OMP_TARGET_UPDATE:
5457 case OMP_TARGET_ENTER_DATA:
5458 case OMP_TARGET_EXIT_DATA:
5459 case OMP_TASKLOOP:
5460 case OMP_TEAMS:
5461 /* These are always void. */
5462 return true;
5464 case CALL_EXPR:
5465 case MODIFY_EXPR:
5466 case PREDICT_EXPR:
5467 /* These are valid regardless of their type. */
5468 return true;
5470 default:
5471 return false;
5476 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5477 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5478 DECL_GIMPLE_REG_P set.
5480 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5481 other, unmodified part of the complex object just before the total store.
5482 As a consequence, if the object is still uninitialized, an undefined value
5483 will be loaded into a register, which may result in a spurious exception
5484 if the register is floating-point and the value happens to be a signaling
5485 NaN for example. Then the fully-fledged complex operations lowering pass
5486 followed by a DCE pass are necessary in order to fix things up. */
5488 static enum gimplify_status
5489 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5490 bool want_value)
5492 enum tree_code code, ocode;
5493 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5495 lhs = TREE_OPERAND (*expr_p, 0);
5496 rhs = TREE_OPERAND (*expr_p, 1);
5497 code = TREE_CODE (lhs);
5498 lhs = TREE_OPERAND (lhs, 0);
5500 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5501 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5502 TREE_NO_WARNING (other) = 1;
5503 other = get_formal_tmp_var (other, pre_p);
5505 realpart = code == REALPART_EXPR ? rhs : other;
5506 imagpart = code == REALPART_EXPR ? other : rhs;
5508 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5509 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5510 else
5511 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5513 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5514 *expr_p = (want_value) ? rhs : NULL_TREE;
5516 return GS_ALL_DONE;
5519 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5521 modify_expr
5522 : varname '=' rhs
5523 | '*' ID '=' rhs
5525 PRE_P points to the list where side effects that must happen before
5526 *EXPR_P should be stored.
5528 POST_P points to the list where side effects that must happen after
5529 *EXPR_P should be stored.
5531 WANT_VALUE is nonzero iff we want to use the value of this expression
5532 in another expression. */
5534 static enum gimplify_status
5535 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5536 bool want_value)
5538 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5539 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5540 enum gimplify_status ret = GS_UNHANDLED;
5541 gimple *assign;
5542 location_t loc = EXPR_LOCATION (*expr_p);
5543 gimple_stmt_iterator gsi;
5545 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5546 || TREE_CODE (*expr_p) == INIT_EXPR);
5548 /* Trying to simplify a clobber using normal logic doesn't work,
5549 so handle it here. */
5550 if (TREE_CLOBBER_P (*from_p))
5552 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5553 if (ret == GS_ERROR)
5554 return ret;
5555 gcc_assert (!want_value);
5556 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5558 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5559 pre_p, post_p);
5560 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5562 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5563 *expr_p = NULL;
5564 return GS_ALL_DONE;
5567 /* Insert pointer conversions required by the middle-end that are not
5568 required by the frontend. This fixes middle-end type checking for
5569 for example gcc.dg/redecl-6.c. */
5570 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5572 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5573 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5574 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5577 /* See if any simplifications can be done based on what the RHS is. */
5578 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5579 want_value);
5580 if (ret != GS_UNHANDLED)
5581 return ret;
5583 /* For zero sized types only gimplify the left hand side and right hand
5584 side as statements and throw away the assignment. Do this after
5585 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5586 types properly. */
5587 if (zero_sized_type (TREE_TYPE (*from_p))
5588 && !want_value
5589 /* Don't do this for calls that return addressable types, expand_call
5590 relies on those having a lhs. */
5591 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5592 && TREE_CODE (*from_p) == CALL_EXPR))
5594 gimplify_stmt (from_p, pre_p);
5595 gimplify_stmt (to_p, pre_p);
5596 *expr_p = NULL_TREE;
5597 return GS_ALL_DONE;
5600 /* If the value being copied is of variable width, compute the length
5601 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5602 before gimplifying any of the operands so that we can resolve any
5603 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5604 the size of the expression to be copied, not of the destination, so
5605 that is what we must do here. */
5606 maybe_with_size_expr (from_p);
5608 /* As a special case, we have to temporarily allow for assignments
5609 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5610 a toplevel statement, when gimplifying the GENERIC expression
5611 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5612 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5614 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5615 prevent gimplify_expr from trying to create a new temporary for
5616 foo's LHS, we tell it that it should only gimplify until it
5617 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5618 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5619 and all we need to do here is set 'a' to be its LHS. */
5621 /* Gimplify the RHS first for C++17 and bug 71104. */
5622 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5623 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5624 if (ret == GS_ERROR)
5625 return ret;
5627 /* Then gimplify the LHS. */
5628 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5629 twice we have to make sure to gimplify into non-SSA as otherwise
5630 the abnormal edge added later will make those defs not dominate
5631 their uses.
5632 ??? Technically this applies only to the registers used in the
5633 resulting non-register *TO_P. */
5634 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5635 if (saved_into_ssa
5636 && TREE_CODE (*from_p) == CALL_EXPR
5637 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5638 gimplify_ctxp->into_ssa = false;
5639 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5640 gimplify_ctxp->into_ssa = saved_into_ssa;
5641 if (ret == GS_ERROR)
5642 return ret;
5644 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5645 guess for the predicate was wrong. */
5646 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5647 if (final_pred != initial_pred)
5649 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5650 if (ret == GS_ERROR)
5651 return ret;
5654 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5655 size as argument to the call. */
5656 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5658 tree call = TREE_OPERAND (*from_p, 0);
5659 tree vlasize = TREE_OPERAND (*from_p, 1);
5661 if (TREE_CODE (call) == CALL_EXPR
5662 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5664 int nargs = call_expr_nargs (call);
5665 tree type = TREE_TYPE (call);
5666 tree ap = CALL_EXPR_ARG (call, 0);
5667 tree tag = CALL_EXPR_ARG (call, 1);
5668 tree aptag = CALL_EXPR_ARG (call, 2);
5669 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5670 IFN_VA_ARG, type,
5671 nargs + 1, ap, tag,
5672 aptag, vlasize);
5673 TREE_OPERAND (*from_p, 0) = newcall;
5677 /* Now see if the above changed *from_p to something we handle specially. */
5678 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5679 want_value);
5680 if (ret != GS_UNHANDLED)
5681 return ret;
5683 /* If we've got a variable sized assignment between two lvalues (i.e. does
5684 not involve a call), then we can make things a bit more straightforward
5685 by converting the assignment to memcpy or memset. */
5686 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5688 tree from = TREE_OPERAND (*from_p, 0);
5689 tree size = TREE_OPERAND (*from_p, 1);
5691 if (TREE_CODE (from) == CONSTRUCTOR)
5692 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5694 if (is_gimple_addressable (from))
5696 *from_p = from;
5697 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5698 pre_p);
5702 /* Transform partial stores to non-addressable complex variables into
5703 total stores. This allows us to use real instead of virtual operands
5704 for these variables, which improves optimization. */
5705 if ((TREE_CODE (*to_p) == REALPART_EXPR
5706 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5707 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5708 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5710 /* Try to alleviate the effects of the gimplification creating artificial
5711 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5712 make sure not to create DECL_DEBUG_EXPR links across functions. */
5713 if (!gimplify_ctxp->into_ssa
5714 && VAR_P (*from_p)
5715 && DECL_IGNORED_P (*from_p)
5716 && DECL_P (*to_p)
5717 && !DECL_IGNORED_P (*to_p)
5718 && decl_function_context (*to_p) == current_function_decl
5719 && decl_function_context (*from_p) == current_function_decl)
5721 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5722 DECL_NAME (*from_p)
5723 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5724 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5725 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5728 if (want_value && TREE_THIS_VOLATILE (*to_p))
5729 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5731 if (TREE_CODE (*from_p) == CALL_EXPR)
5733 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5734 instead of a GIMPLE_ASSIGN. */
5735 gcall *call_stmt;
5736 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5738 /* Gimplify internal functions created in the FEs. */
5739 int nargs = call_expr_nargs (*from_p), i;
5740 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5741 auto_vec<tree> vargs (nargs);
5743 for (i = 0; i < nargs; i++)
5745 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5746 EXPR_LOCATION (*from_p));
5747 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5749 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5750 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5751 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5753 else
5755 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5756 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5757 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5758 tree fndecl = get_callee_fndecl (*from_p);
5759 if (fndecl
5760 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5761 && call_expr_nargs (*from_p) == 3)
5762 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5763 CALL_EXPR_ARG (*from_p, 0),
5764 CALL_EXPR_ARG (*from_p, 1),
5765 CALL_EXPR_ARG (*from_p, 2));
5766 else
5768 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5771 notice_special_calls (call_stmt);
5772 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5773 gimple_call_set_lhs (call_stmt, *to_p);
5774 else if (TREE_CODE (*to_p) == SSA_NAME)
5775 /* The above is somewhat premature, avoid ICEing later for a
5776 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5777 ??? This doesn't make it a default-def. */
5778 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5780 assign = call_stmt;
5782 else
5784 assign = gimple_build_assign (*to_p, *from_p);
5785 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5786 if (COMPARISON_CLASS_P (*from_p))
5787 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5790 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5792 /* We should have got an SSA name from the start. */
5793 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5794 || ! gimple_in_ssa_p (cfun));
5797 gimplify_seq_add_stmt (pre_p, assign);
5798 gsi = gsi_last (*pre_p);
5799 maybe_fold_stmt (&gsi);
5801 if (want_value)
5803 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5804 return GS_OK;
5806 else
5807 *expr_p = NULL;
5809 return GS_ALL_DONE;
5812 /* Gimplify a comparison between two variable-sized objects. Do this
5813 with a call to BUILT_IN_MEMCMP. */
5815 static enum gimplify_status
5816 gimplify_variable_sized_compare (tree *expr_p)
5818 location_t loc = EXPR_LOCATION (*expr_p);
5819 tree op0 = TREE_OPERAND (*expr_p, 0);
5820 tree op1 = TREE_OPERAND (*expr_p, 1);
5821 tree t, arg, dest, src, expr;
5823 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5824 arg = unshare_expr (arg);
5825 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5826 src = build_fold_addr_expr_loc (loc, op1);
5827 dest = build_fold_addr_expr_loc (loc, op0);
5828 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5829 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5831 expr
5832 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5833 SET_EXPR_LOCATION (expr, loc);
5834 *expr_p = expr;
5836 return GS_OK;
5839 /* Gimplify a comparison between two aggregate objects of integral scalar
5840 mode as a comparison between the bitwise equivalent scalar values. */
5842 static enum gimplify_status
5843 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5845 location_t loc = EXPR_LOCATION (*expr_p);
5846 tree op0 = TREE_OPERAND (*expr_p, 0);
5847 tree op1 = TREE_OPERAND (*expr_p, 1);
5849 tree type = TREE_TYPE (op0);
5850 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5852 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5853 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5855 *expr_p
5856 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5858 return GS_OK;
5861 /* Gimplify an expression sequence. This function gimplifies each
5862 expression and rewrites the original expression with the last
5863 expression of the sequence in GIMPLE form.
5865 PRE_P points to the list where the side effects for all the
5866 expressions in the sequence will be emitted.
5868 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5870 static enum gimplify_status
5871 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5873 tree t = *expr_p;
5877 tree *sub_p = &TREE_OPERAND (t, 0);
5879 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5880 gimplify_compound_expr (sub_p, pre_p, false);
5881 else
5882 gimplify_stmt (sub_p, pre_p);
5884 t = TREE_OPERAND (t, 1);
5886 while (TREE_CODE (t) == COMPOUND_EXPR);
5888 *expr_p = t;
5889 if (want_value)
5890 return GS_OK;
5891 else
5893 gimplify_stmt (expr_p, pre_p);
5894 return GS_ALL_DONE;
5898 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5899 gimplify. After gimplification, EXPR_P will point to a new temporary
5900 that holds the original value of the SAVE_EXPR node.
5902 PRE_P points to the list where side effects that must happen before
5903 *EXPR_P should be stored. */
5905 static enum gimplify_status
5906 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5908 enum gimplify_status ret = GS_ALL_DONE;
5909 tree val;
5911 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5912 val = TREE_OPERAND (*expr_p, 0);
5914 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5915 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5917 /* The operand may be a void-valued expression. It is
5918 being executed only for its side-effects. */
5919 if (TREE_TYPE (val) == void_type_node)
5921 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5922 is_gimple_stmt, fb_none);
5923 val = NULL;
5925 else
5926 /* The temporary may not be an SSA name as later abnormal and EH
5927 control flow may invalidate use/def domination. When in SSA
5928 form then assume there are no such issues and SAVE_EXPRs only
5929 appear via GENERIC foldings. */
5930 val = get_initialized_tmp_var (val, pre_p, post_p,
5931 gimple_in_ssa_p (cfun));
5933 TREE_OPERAND (*expr_p, 0) = val;
5934 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5937 *expr_p = val;
5939 return ret;
5942 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5944 unary_expr
5945 : ...
5946 | '&' varname
5949 PRE_P points to the list where side effects that must happen before
5950 *EXPR_P should be stored.
5952 POST_P points to the list where side effects that must happen after
5953 *EXPR_P should be stored. */
5955 static enum gimplify_status
5956 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5958 tree expr = *expr_p;
5959 tree op0 = TREE_OPERAND (expr, 0);
5960 enum gimplify_status ret;
5961 location_t loc = EXPR_LOCATION (*expr_p);
5963 switch (TREE_CODE (op0))
5965 case INDIRECT_REF:
5966 do_indirect_ref:
5967 /* Check if we are dealing with an expression of the form '&*ptr'.
5968 While the front end folds away '&*ptr' into 'ptr', these
5969 expressions may be generated internally by the compiler (e.g.,
5970 builtins like __builtin_va_end). */
5971 /* Caution: the silent array decomposition semantics we allow for
5972 ADDR_EXPR means we can't always discard the pair. */
5973 /* Gimplification of the ADDR_EXPR operand may drop
5974 cv-qualification conversions, so make sure we add them if
5975 needed. */
5977 tree op00 = TREE_OPERAND (op0, 0);
5978 tree t_expr = TREE_TYPE (expr);
5979 tree t_op00 = TREE_TYPE (op00);
5981 if (!useless_type_conversion_p (t_expr, t_op00))
5982 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5983 *expr_p = op00;
5984 ret = GS_OK;
5986 break;
5988 case VIEW_CONVERT_EXPR:
5989 /* Take the address of our operand and then convert it to the type of
5990 this ADDR_EXPR.
5992 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5993 all clear. The impact of this transformation is even less clear. */
5995 /* If the operand is a useless conversion, look through it. Doing so
5996 guarantees that the ADDR_EXPR and its operand will remain of the
5997 same type. */
5998 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5999 op0 = TREE_OPERAND (op0, 0);
6001 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6002 build_fold_addr_expr_loc (loc,
6003 TREE_OPERAND (op0, 0)));
6004 ret = GS_OK;
6005 break;
6007 case MEM_REF:
6008 if (integer_zerop (TREE_OPERAND (op0, 1)))
6009 goto do_indirect_ref;
6011 /* fall through */
6013 default:
6014 /* If we see a call to a declared builtin or see its address
6015 being taken (we can unify those cases here) then we can mark
6016 the builtin for implicit generation by GCC. */
6017 if (TREE_CODE (op0) == FUNCTION_DECL
6018 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6019 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6020 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6022 /* We use fb_either here because the C frontend sometimes takes
6023 the address of a call that returns a struct; see
6024 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6025 the implied temporary explicit. */
6027 /* Make the operand addressable. */
6028 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6029 is_gimple_addressable, fb_either);
6030 if (ret == GS_ERROR)
6031 break;
6033 /* Then mark it. Beware that it may not be possible to do so directly
6034 if a temporary has been created by the gimplification. */
6035 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6037 op0 = TREE_OPERAND (expr, 0);
6039 /* For various reasons, the gimplification of the expression
6040 may have made a new INDIRECT_REF. */
6041 if (TREE_CODE (op0) == INDIRECT_REF)
6042 goto do_indirect_ref;
6044 mark_addressable (TREE_OPERAND (expr, 0));
6046 /* The FEs may end up building ADDR_EXPRs early on a decl with
6047 an incomplete type. Re-build ADDR_EXPRs in canonical form
6048 here. */
6049 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6050 *expr_p = build_fold_addr_expr (op0);
6052 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6053 recompute_tree_invariant_for_addr_expr (*expr_p);
6055 /* If we re-built the ADDR_EXPR add a conversion to the original type
6056 if required. */
6057 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6058 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6060 break;
6063 return ret;
6066 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6067 value; output operands should be a gimple lvalue. */
6069 static enum gimplify_status
6070 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6072 tree expr;
6073 int noutputs;
6074 const char **oconstraints;
6075 int i;
6076 tree link;
6077 const char *constraint;
6078 bool allows_mem, allows_reg, is_inout;
6079 enum gimplify_status ret, tret;
6080 gasm *stmt;
6081 vec<tree, va_gc> *inputs;
6082 vec<tree, va_gc> *outputs;
6083 vec<tree, va_gc> *clobbers;
6084 vec<tree, va_gc> *labels;
6085 tree link_next;
6087 expr = *expr_p;
6088 noutputs = list_length (ASM_OUTPUTS (expr));
6089 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6091 inputs = NULL;
6092 outputs = NULL;
6093 clobbers = NULL;
6094 labels = NULL;
6096 ret = GS_ALL_DONE;
6097 link_next = NULL_TREE;
6098 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6100 bool ok;
6101 size_t constraint_len;
6103 link_next = TREE_CHAIN (link);
6105 oconstraints[i]
6106 = constraint
6107 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6108 constraint_len = strlen (constraint);
6109 if (constraint_len == 0)
6110 continue;
6112 ok = parse_output_constraint (&constraint, i, 0, 0,
6113 &allows_mem, &allows_reg, &is_inout);
6114 if (!ok)
6116 ret = GS_ERROR;
6117 is_inout = false;
6120 if (!allows_reg && allows_mem)
6121 mark_addressable (TREE_VALUE (link));
6123 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6124 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6125 fb_lvalue | fb_mayfail);
6126 if (tret == GS_ERROR)
6128 error ("invalid lvalue in asm output %d", i);
6129 ret = tret;
6132 /* If the constraint does not allow memory make sure we gimplify
6133 it to a register if it is not already but its base is. This
6134 happens for complex and vector components. */
6135 if (!allows_mem)
6137 tree op = TREE_VALUE (link);
6138 if (! is_gimple_val (op)
6139 && is_gimple_reg_type (TREE_TYPE (op))
6140 && is_gimple_reg (get_base_address (op)))
6142 tree tem = create_tmp_reg (TREE_TYPE (op));
6143 tree ass;
6144 if (is_inout)
6146 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6147 tem, unshare_expr (op));
6148 gimplify_and_add (ass, pre_p);
6150 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6151 gimplify_and_add (ass, post_p);
6153 TREE_VALUE (link) = tem;
6154 tret = GS_OK;
6158 vec_safe_push (outputs, link);
6159 TREE_CHAIN (link) = NULL_TREE;
6161 if (is_inout)
6163 /* An input/output operand. To give the optimizers more
6164 flexibility, split it into separate input and output
6165 operands. */
6166 tree input;
6167 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6168 char buf[11];
6170 /* Turn the in/out constraint into an output constraint. */
6171 char *p = xstrdup (constraint);
6172 p[0] = '=';
6173 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6175 /* And add a matching input constraint. */
6176 if (allows_reg)
6178 sprintf (buf, "%u", i);
6180 /* If there are multiple alternatives in the constraint,
6181 handle each of them individually. Those that allow register
6182 will be replaced with operand number, the others will stay
6183 unchanged. */
6184 if (strchr (p, ',') != NULL)
6186 size_t len = 0, buflen = strlen (buf);
6187 char *beg, *end, *str, *dst;
6189 for (beg = p + 1;;)
6191 end = strchr (beg, ',');
6192 if (end == NULL)
6193 end = strchr (beg, '\0');
6194 if ((size_t) (end - beg) < buflen)
6195 len += buflen + 1;
6196 else
6197 len += end - beg + 1;
6198 if (*end)
6199 beg = end + 1;
6200 else
6201 break;
6204 str = (char *) alloca (len);
6205 for (beg = p + 1, dst = str;;)
6207 const char *tem;
6208 bool mem_p, reg_p, inout_p;
6210 end = strchr (beg, ',');
6211 if (end)
6212 *end = '\0';
6213 beg[-1] = '=';
6214 tem = beg - 1;
6215 parse_output_constraint (&tem, i, 0, 0,
6216 &mem_p, &reg_p, &inout_p);
6217 if (dst != str)
6218 *dst++ = ',';
6219 if (reg_p)
6221 memcpy (dst, buf, buflen);
6222 dst += buflen;
6224 else
6226 if (end)
6227 len = end - beg;
6228 else
6229 len = strlen (beg);
6230 memcpy (dst, beg, len);
6231 dst += len;
6233 if (end)
6234 beg = end + 1;
6235 else
6236 break;
6238 *dst = '\0';
6239 input = build_string (dst - str, str);
6241 else
6242 input = build_string (strlen (buf), buf);
6244 else
6245 input = build_string (constraint_len - 1, constraint + 1);
6247 free (p);
6249 input = build_tree_list (build_tree_list (NULL_TREE, input),
6250 unshare_expr (TREE_VALUE (link)));
6251 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6255 link_next = NULL_TREE;
6256 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6258 link_next = TREE_CHAIN (link);
6259 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6260 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6261 oconstraints, &allows_mem, &allows_reg);
6263 /* If we can't make copies, we can only accept memory. */
6264 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6266 if (allows_mem)
6267 allows_reg = 0;
6268 else
6270 error ("impossible constraint in %<asm%>");
6271 error ("non-memory input %d must stay in memory", i);
6272 return GS_ERROR;
6276 /* If the operand is a memory input, it should be an lvalue. */
6277 if (!allows_reg && allows_mem)
6279 tree inputv = TREE_VALUE (link);
6280 STRIP_NOPS (inputv);
6281 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6282 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6283 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6284 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6285 || TREE_CODE (inputv) == MODIFY_EXPR)
6286 TREE_VALUE (link) = error_mark_node;
6287 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6288 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6289 if (tret != GS_ERROR)
6291 /* Unlike output operands, memory inputs are not guaranteed
6292 to be lvalues by the FE, and while the expressions are
6293 marked addressable there, if it is e.g. a statement
6294 expression, temporaries in it might not end up being
6295 addressable. They might be already used in the IL and thus
6296 it is too late to make them addressable now though. */
6297 tree x = TREE_VALUE (link);
6298 while (handled_component_p (x))
6299 x = TREE_OPERAND (x, 0);
6300 if (TREE_CODE (x) == MEM_REF
6301 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6302 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6303 if ((VAR_P (x)
6304 || TREE_CODE (x) == PARM_DECL
6305 || TREE_CODE (x) == RESULT_DECL)
6306 && !TREE_ADDRESSABLE (x)
6307 && is_gimple_reg (x))
6309 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6310 input_location), 0,
6311 "memory input %d is not directly addressable",
6313 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6316 mark_addressable (TREE_VALUE (link));
6317 if (tret == GS_ERROR)
6319 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6320 "memory input %d is not directly addressable", i);
6321 ret = tret;
6324 else
6326 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6327 is_gimple_asm_val, fb_rvalue);
6328 if (tret == GS_ERROR)
6329 ret = tret;
6332 TREE_CHAIN (link) = NULL_TREE;
6333 vec_safe_push (inputs, link);
6336 link_next = NULL_TREE;
6337 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6339 link_next = TREE_CHAIN (link);
6340 TREE_CHAIN (link) = NULL_TREE;
6341 vec_safe_push (clobbers, link);
6344 link_next = NULL_TREE;
6345 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6347 link_next = TREE_CHAIN (link);
6348 TREE_CHAIN (link) = NULL_TREE;
6349 vec_safe_push (labels, link);
6352 /* Do not add ASMs with errors to the gimple IL stream. */
6353 if (ret != GS_ERROR)
6355 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6356 inputs, outputs, clobbers, labels);
6358 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6359 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6361 gimplify_seq_add_stmt (pre_p, stmt);
6364 return ret;
6367 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6368 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6369 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6370 return to this function.
6372 FIXME should we complexify the prequeue handling instead? Or use flags
6373 for all the cleanups and let the optimizer tighten them up? The current
6374 code seems pretty fragile; it will break on a cleanup within any
6375 non-conditional nesting. But any such nesting would be broken, anyway;
6376 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6377 and continues out of it. We can do that at the RTL level, though, so
6378 having an optimizer to tighten up try/finally regions would be a Good
6379 Thing. */
6381 static enum gimplify_status
6382 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6384 gimple_stmt_iterator iter;
6385 gimple_seq body_sequence = NULL;
6387 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6389 /* We only care about the number of conditions between the innermost
6390 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6391 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6392 int old_conds = gimplify_ctxp->conditions;
6393 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6394 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6395 gimplify_ctxp->conditions = 0;
6396 gimplify_ctxp->conditional_cleanups = NULL;
6397 gimplify_ctxp->in_cleanup_point_expr = true;
6399 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6401 gimplify_ctxp->conditions = old_conds;
6402 gimplify_ctxp->conditional_cleanups = old_cleanups;
6403 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6405 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6407 gimple *wce = gsi_stmt (iter);
6409 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6411 if (gsi_one_before_end_p (iter))
6413 /* Note that gsi_insert_seq_before and gsi_remove do not
6414 scan operands, unlike some other sequence mutators. */
6415 if (!gimple_wce_cleanup_eh_only (wce))
6416 gsi_insert_seq_before_without_update (&iter,
6417 gimple_wce_cleanup (wce),
6418 GSI_SAME_STMT);
6419 gsi_remove (&iter, true);
6420 break;
6422 else
6424 gtry *gtry;
6425 gimple_seq seq;
6426 enum gimple_try_flags kind;
6428 if (gimple_wce_cleanup_eh_only (wce))
6429 kind = GIMPLE_TRY_CATCH;
6430 else
6431 kind = GIMPLE_TRY_FINALLY;
6432 seq = gsi_split_seq_after (iter);
6434 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6435 /* Do not use gsi_replace here, as it may scan operands.
6436 We want to do a simple structural modification only. */
6437 gsi_set_stmt (&iter, gtry);
6438 iter = gsi_start (gtry->eval);
6441 else
6442 gsi_next (&iter);
6445 gimplify_seq_add_seq (pre_p, body_sequence);
6446 if (temp)
6448 *expr_p = temp;
6449 return GS_OK;
6451 else
6453 *expr_p = NULL;
6454 return GS_ALL_DONE;
6458 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6459 is the cleanup action required. EH_ONLY is true if the cleanup should
6460 only be executed if an exception is thrown, not on normal exit.
6461 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6462 only valid for clobbers. */
6464 static void
6465 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6466 bool force_uncond = false)
6468 gimple *wce;
6469 gimple_seq cleanup_stmts = NULL;
6471 /* Errors can result in improperly nested cleanups. Which results in
6472 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6473 if (seen_error ())
6474 return;
6476 if (gimple_conditional_context ())
6478 /* If we're in a conditional context, this is more complex. We only
6479 want to run the cleanup if we actually ran the initialization that
6480 necessitates it, but we want to run it after the end of the
6481 conditional context. So we wrap the try/finally around the
6482 condition and use a flag to determine whether or not to actually
6483 run the destructor. Thus
6485 test ? f(A()) : 0
6487 becomes (approximately)
6489 flag = 0;
6490 try {
6491 if (test) { A::A(temp); flag = 1; val = f(temp); }
6492 else { val = 0; }
6493 } finally {
6494 if (flag) A::~A(temp);
6498 if (force_uncond)
6500 gimplify_stmt (&cleanup, &cleanup_stmts);
6501 wce = gimple_build_wce (cleanup_stmts);
6502 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6504 else
6506 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6507 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6508 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6510 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6511 gimplify_stmt (&cleanup, &cleanup_stmts);
6512 wce = gimple_build_wce (cleanup_stmts);
6514 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6515 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6516 gimplify_seq_add_stmt (pre_p, ftrue);
6518 /* Because of this manipulation, and the EH edges that jump
6519 threading cannot redirect, the temporary (VAR) will appear
6520 to be used uninitialized. Don't warn. */
6521 TREE_NO_WARNING (var) = 1;
6524 else
6526 gimplify_stmt (&cleanup, &cleanup_stmts);
6527 wce = gimple_build_wce (cleanup_stmts);
6528 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6529 gimplify_seq_add_stmt (pre_p, wce);
6533 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6535 static enum gimplify_status
6536 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6538 tree targ = *expr_p;
6539 tree temp = TARGET_EXPR_SLOT (targ);
6540 tree init = TARGET_EXPR_INITIAL (targ);
6541 enum gimplify_status ret;
6543 bool unpoison_empty_seq = false;
6544 gimple_stmt_iterator unpoison_it;
6546 if (init)
6548 tree cleanup = NULL_TREE;
6550 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6551 to the temps list. Handle also variable length TARGET_EXPRs. */
6552 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6554 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6555 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6556 gimplify_vla_decl (temp, pre_p);
6558 else
6560 /* Save location where we need to place unpoisoning. It's possible
6561 that a variable will be converted to needs_to_live_in_memory. */
6562 unpoison_it = gsi_last (*pre_p);
6563 unpoison_empty_seq = gsi_end_p (unpoison_it);
6565 gimple_add_tmp_var (temp);
6568 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6569 expression is supposed to initialize the slot. */
6570 if (VOID_TYPE_P (TREE_TYPE (init)))
6571 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6572 else
6574 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6575 init = init_expr;
6576 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6577 init = NULL;
6578 ggc_free (init_expr);
6580 if (ret == GS_ERROR)
6582 /* PR c++/28266 Make sure this is expanded only once. */
6583 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6584 return GS_ERROR;
6586 if (init)
6587 gimplify_and_add (init, pre_p);
6589 /* If needed, push the cleanup for the temp. */
6590 if (TARGET_EXPR_CLEANUP (targ))
6592 if (CLEANUP_EH_ONLY (targ))
6593 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6594 CLEANUP_EH_ONLY (targ), pre_p);
6595 else
6596 cleanup = TARGET_EXPR_CLEANUP (targ);
6599 /* Add a clobber for the temporary going out of scope, like
6600 gimplify_bind_expr. */
6601 if (gimplify_ctxp->in_cleanup_point_expr
6602 && needs_to_live_in_memory (temp))
6604 if (flag_stack_reuse == SR_ALL)
6606 tree clobber = build_clobber (TREE_TYPE (temp));
6607 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6608 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6610 if (asan_poisoned_variables
6611 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6612 && dbg_cnt (asan_use_after_scope)
6613 && !gimplify_omp_ctxp)
6615 tree asan_cleanup = build_asan_poison_call_expr (temp);
6616 if (asan_cleanup)
6618 if (unpoison_empty_seq)
6619 unpoison_it = gsi_start (*pre_p);
6621 asan_poison_variable (temp, false, &unpoison_it,
6622 unpoison_empty_seq);
6623 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6627 if (cleanup)
6628 gimple_push_cleanup (temp, cleanup, false, pre_p);
6630 /* Only expand this once. */
6631 TREE_OPERAND (targ, 3) = init;
6632 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6634 else
6635 /* We should have expanded this before. */
6636 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6638 *expr_p = temp;
6639 return GS_OK;
6642 /* Gimplification of expression trees. */
6644 /* Gimplify an expression which appears at statement context. The
6645 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6646 NULL, a new sequence is allocated.
6648 Return true if we actually added a statement to the queue. */
6650 bool
6651 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6653 gimple_seq_node last;
6655 last = gimple_seq_last (*seq_p);
6656 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6657 return last != gimple_seq_last (*seq_p);
6660 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6661 to CTX. If entries already exist, force them to be some flavor of private.
6662 If there is no enclosing parallel, do nothing. */
6664 void
6665 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6667 splay_tree_node n;
6669 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6670 return;
6674 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6675 if (n != NULL)
6677 if (n->value & GOVD_SHARED)
6678 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6679 else if (n->value & GOVD_MAP)
6680 n->value |= GOVD_MAP_TO_ONLY;
6681 else
6682 return;
6684 else if ((ctx->region_type & ORT_TARGET) != 0)
6686 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6687 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6688 else
6689 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6691 else if (ctx->region_type != ORT_WORKSHARE
6692 && ctx->region_type != ORT_TASKGROUP
6693 && ctx->region_type != ORT_SIMD
6694 && ctx->region_type != ORT_ACC
6695 && !(ctx->region_type & ORT_TARGET_DATA))
6696 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6698 ctx = ctx->outer_context;
6700 while (ctx);
6703 /* Similarly for each of the type sizes of TYPE. */
6705 static void
6706 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6708 if (type == NULL || type == error_mark_node)
6709 return;
6710 type = TYPE_MAIN_VARIANT (type);
6712 if (ctx->privatized_types->add (type))
6713 return;
6715 switch (TREE_CODE (type))
6717 case INTEGER_TYPE:
6718 case ENUMERAL_TYPE:
6719 case BOOLEAN_TYPE:
6720 case REAL_TYPE:
6721 case FIXED_POINT_TYPE:
6722 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6723 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6724 break;
6726 case ARRAY_TYPE:
6727 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6728 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6729 break;
6731 case RECORD_TYPE:
6732 case UNION_TYPE:
6733 case QUAL_UNION_TYPE:
6735 tree field;
6736 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6737 if (TREE_CODE (field) == FIELD_DECL)
6739 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6740 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6743 break;
6745 case POINTER_TYPE:
6746 case REFERENCE_TYPE:
6747 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6748 break;
6750 default:
6751 break;
6754 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6755 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6756 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6759 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6761 static void
6762 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6764 splay_tree_node n;
6765 unsigned int nflags;
6766 tree t;
6768 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6769 return;
6771 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6772 there are constructors involved somewhere. Exception is a shared clause,
6773 there is nothing privatized in that case. */
6774 if ((flags & GOVD_SHARED) == 0
6775 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6776 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6777 flags |= GOVD_SEEN;
6779 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6780 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6782 /* We shouldn't be re-adding the decl with the same data
6783 sharing class. */
6784 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6785 nflags = n->value | flags;
6786 /* The only combination of data sharing classes we should see is
6787 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6788 reduction variables to be used in data sharing clauses. */
6789 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6790 || ((nflags & GOVD_DATA_SHARE_CLASS)
6791 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6792 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6793 n->value = nflags;
6794 return;
6797 /* When adding a variable-sized variable, we have to handle all sorts
6798 of additional bits of data: the pointer replacement variable, and
6799 the parameters of the type. */
6800 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6802 /* Add the pointer replacement variable as PRIVATE if the variable
6803 replacement is private, else FIRSTPRIVATE since we'll need the
6804 address of the original variable either for SHARED, or for the
6805 copy into or out of the context. */
6806 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6808 if (flags & GOVD_MAP)
6809 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6810 else if (flags & GOVD_PRIVATE)
6811 nflags = GOVD_PRIVATE;
6812 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6813 && (flags & GOVD_FIRSTPRIVATE))
6814 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6815 else
6816 nflags = GOVD_FIRSTPRIVATE;
6817 nflags |= flags & GOVD_SEEN;
6818 t = DECL_VALUE_EXPR (decl);
6819 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6820 t = TREE_OPERAND (t, 0);
6821 gcc_assert (DECL_P (t));
6822 omp_add_variable (ctx, t, nflags);
6825 /* Add all of the variable and type parameters (which should have
6826 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6827 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6828 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6829 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6831 /* The variable-sized variable itself is never SHARED, only some form
6832 of PRIVATE. The sharing would take place via the pointer variable
6833 which we remapped above. */
6834 if (flags & GOVD_SHARED)
6835 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6836 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6838 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6839 alloca statement we generate for the variable, so make sure it
6840 is available. This isn't automatically needed for the SHARED
6841 case, since we won't be allocating local storage then.
6842 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6843 in this case omp_notice_variable will be called later
6844 on when it is gimplified. */
6845 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6846 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6847 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6849 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6850 && lang_hooks.decls.omp_privatize_by_reference (decl))
6852 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6854 /* Similar to the direct variable sized case above, we'll need the
6855 size of references being privatized. */
6856 if ((flags & GOVD_SHARED) == 0)
6858 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6859 if (DECL_P (t))
6860 omp_notice_variable (ctx, t, true);
6864 if (n != NULL)
6865 n->value |= flags;
6866 else
6867 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6869 /* For reductions clauses in OpenACC loop directives, by default create a
6870 copy clause on the enclosing parallel construct for carrying back the
6871 results. */
6872 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6874 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6875 while (outer_ctx)
6877 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6878 if (n != NULL)
6880 /* Ignore local variables and explicitly declared clauses. */
6881 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6882 break;
6883 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6885 /* According to the OpenACC spec, such a reduction variable
6886 should already have a copy map on a kernels construct,
6887 verify that here. */
6888 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6889 && (n->value & GOVD_MAP));
6891 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6893 /* Remove firstprivate and make it a copy map. */
6894 n->value &= ~GOVD_FIRSTPRIVATE;
6895 n->value |= GOVD_MAP;
6898 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6900 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6901 GOVD_MAP | GOVD_SEEN);
6902 break;
6904 outer_ctx = outer_ctx->outer_context;
6909 /* Notice a threadprivate variable DECL used in OMP context CTX.
6910 This just prints out diagnostics about threadprivate variable uses
6911 in untied tasks. If DECL2 is non-NULL, prevent this warning
6912 on that variable. */
6914 static bool
6915 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6916 tree decl2)
6918 splay_tree_node n;
6919 struct gimplify_omp_ctx *octx;
6921 for (octx = ctx; octx; octx = octx->outer_context)
6922 if ((octx->region_type & ORT_TARGET) != 0)
6924 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6925 if (n == NULL)
6927 error ("threadprivate variable %qE used in target region",
6928 DECL_NAME (decl));
6929 error_at (octx->location, "enclosing target region");
6930 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6932 if (decl2)
6933 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6936 if (ctx->region_type != ORT_UNTIED_TASK)
6937 return false;
6938 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6939 if (n == NULL)
6941 error ("threadprivate variable %qE used in untied task",
6942 DECL_NAME (decl));
6943 error_at (ctx->location, "enclosing task");
6944 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6946 if (decl2)
6947 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6948 return false;
6951 /* Return true if global var DECL is device resident. */
6953 static bool
6954 device_resident_p (tree decl)
6956 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6958 if (!attr)
6959 return false;
6961 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6963 tree c = TREE_VALUE (t);
6964 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6965 return true;
6968 return false;
6971 /* Return true if DECL has an ACC DECLARE attribute. */
6973 static bool
6974 is_oacc_declared (tree decl)
6976 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6977 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6978 return declared != NULL_TREE;
6981 /* Determine outer default flags for DECL mentioned in an OMP region
6982 but not declared in an enclosing clause.
6984 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6985 remapped firstprivate instead of shared. To some extent this is
6986 addressed in omp_firstprivatize_type_sizes, but not
6987 effectively. */
6989 static unsigned
6990 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6991 bool in_code, unsigned flags)
6993 enum omp_clause_default_kind default_kind = ctx->default_kind;
6994 enum omp_clause_default_kind kind;
6996 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6997 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6998 default_kind = kind;
7000 switch (default_kind)
7002 case OMP_CLAUSE_DEFAULT_NONE:
7004 const char *rtype;
7006 if (ctx->region_type & ORT_PARALLEL)
7007 rtype = "parallel";
7008 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7009 rtype = "taskloop";
7010 else if (ctx->region_type & ORT_TASK)
7011 rtype = "task";
7012 else if (ctx->region_type & ORT_TEAMS)
7013 rtype = "teams";
7014 else
7015 gcc_unreachable ();
7017 error ("%qE not specified in enclosing %qs",
7018 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7019 error_at (ctx->location, "enclosing %qs", rtype);
7021 /* FALLTHRU */
7022 case OMP_CLAUSE_DEFAULT_SHARED:
7023 flags |= GOVD_SHARED;
7024 break;
7025 case OMP_CLAUSE_DEFAULT_PRIVATE:
7026 flags |= GOVD_PRIVATE;
7027 break;
7028 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7029 flags |= GOVD_FIRSTPRIVATE;
7030 break;
7031 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7032 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7033 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7034 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7036 omp_notice_variable (octx, decl, in_code);
7037 for (; octx; octx = octx->outer_context)
7039 splay_tree_node n2;
7041 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7042 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7043 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7044 continue;
7045 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7047 flags |= GOVD_FIRSTPRIVATE;
7048 goto found_outer;
7050 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7052 flags |= GOVD_SHARED;
7053 goto found_outer;
7058 if (TREE_CODE (decl) == PARM_DECL
7059 || (!is_global_var (decl)
7060 && DECL_CONTEXT (decl) == current_function_decl))
7061 flags |= GOVD_FIRSTPRIVATE;
7062 else
7063 flags |= GOVD_SHARED;
7064 found_outer:
7065 break;
7067 default:
7068 gcc_unreachable ();
7071 return flags;
7075 /* Determine outer default flags for DECL mentioned in an OACC region
7076 but not declared in an enclosing clause. */
7078 static unsigned
7079 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7081 const char *rkind;
7082 bool on_device = false;
7083 bool declared = is_oacc_declared (decl);
7084 tree type = TREE_TYPE (decl);
7086 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7087 type = TREE_TYPE (type);
7089 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7090 && is_global_var (decl)
7091 && device_resident_p (decl))
7093 on_device = true;
7094 flags |= GOVD_MAP_TO_ONLY;
7097 switch (ctx->region_type)
7099 case ORT_ACC_KERNELS:
7100 rkind = "kernels";
7102 if (AGGREGATE_TYPE_P (type))
7104 /* Aggregates default to 'present_or_copy', or 'present'. */
7105 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7106 flags |= GOVD_MAP;
7107 else
7108 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7110 else
7111 /* Scalars default to 'copy'. */
7112 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7114 break;
7116 case ORT_ACC_PARALLEL:
7117 rkind = "parallel";
7119 if (on_device || declared)
7120 flags |= GOVD_MAP;
7121 else if (AGGREGATE_TYPE_P (type))
7123 /* Aggregates default to 'present_or_copy', or 'present'. */
7124 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7125 flags |= GOVD_MAP;
7126 else
7127 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7129 else
7130 /* Scalars default to 'firstprivate'. */
7131 flags |= GOVD_FIRSTPRIVATE;
7133 break;
7135 default:
7136 gcc_unreachable ();
7139 if (DECL_ARTIFICIAL (decl))
7140 ; /* We can get compiler-generated decls, and should not complain
7141 about them. */
7142 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7144 error ("%qE not specified in enclosing OpenACC %qs construct",
7145 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7146 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7148 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7149 ; /* Handled above. */
7150 else
7151 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7153 return flags;
7156 /* Record the fact that DECL was used within the OMP context CTX.
7157 IN_CODE is true when real code uses DECL, and false when we should
7158 merely emit default(none) errors. Return true if DECL is going to
7159 be remapped and thus DECL shouldn't be gimplified into its
7160 DECL_VALUE_EXPR (if any). */
7162 static bool
7163 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7165 splay_tree_node n;
7166 unsigned flags = in_code ? GOVD_SEEN : 0;
7167 bool ret = false, shared;
7169 if (error_operand_p (decl))
7170 return false;
7172 if (ctx->region_type == ORT_NONE)
7173 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7175 if (is_global_var (decl))
7177 /* Threadprivate variables are predetermined. */
7178 if (DECL_THREAD_LOCAL_P (decl))
7179 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7181 if (DECL_HAS_VALUE_EXPR_P (decl))
7183 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7185 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7186 return omp_notice_threadprivate_variable (ctx, decl, value);
7189 if (gimplify_omp_ctxp->outer_context == NULL
7190 && VAR_P (decl)
7191 && oacc_get_fn_attrib (current_function_decl))
7193 location_t loc = DECL_SOURCE_LOCATION (decl);
7195 if (lookup_attribute ("omp declare target link",
7196 DECL_ATTRIBUTES (decl)))
7198 error_at (loc,
7199 "%qE with %<link%> clause used in %<routine%> function",
7200 DECL_NAME (decl));
7201 return false;
7203 else if (!lookup_attribute ("omp declare target",
7204 DECL_ATTRIBUTES (decl)))
7206 error_at (loc,
7207 "%qE requires a %<declare%> directive for use "
7208 "in a %<routine%> function", DECL_NAME (decl));
7209 return false;
7214 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7215 if ((ctx->region_type & ORT_TARGET) != 0)
7217 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7218 if (n == NULL)
7220 unsigned nflags = flags;
7221 if ((ctx->region_type & ORT_ACC) == 0)
7223 bool is_declare_target = false;
7224 if (is_global_var (decl)
7225 && varpool_node::get_create (decl)->offloadable)
7227 struct gimplify_omp_ctx *octx;
7228 for (octx = ctx->outer_context;
7229 octx; octx = octx->outer_context)
7231 n = splay_tree_lookup (octx->variables,
7232 (splay_tree_key)decl);
7233 if (n
7234 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7235 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7236 break;
7238 is_declare_target = octx == NULL;
7240 if (!is_declare_target)
7242 int gdmk;
7243 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7244 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7245 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7246 == POINTER_TYPE)))
7247 gdmk = GDMK_POINTER;
7248 else if (lang_hooks.decls.omp_scalar_p (decl))
7249 gdmk = GDMK_SCALAR;
7250 else
7251 gdmk = GDMK_AGGREGATE;
7252 if (ctx->defaultmap[gdmk] == 0)
7254 tree d = lang_hooks.decls.omp_report_decl (decl);
7255 error ("%qE not specified in enclosing %<target%>",
7256 DECL_NAME (d));
7257 error_at (ctx->location, "enclosing %<target%>");
7259 else if (ctx->defaultmap[gdmk]
7260 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7261 nflags |= ctx->defaultmap[gdmk];
7262 else
7264 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7265 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7270 struct gimplify_omp_ctx *octx = ctx->outer_context;
7271 if ((ctx->region_type & ORT_ACC) && octx)
7273 /* Look in outer OpenACC contexts, to see if there's a
7274 data attribute for this variable. */
7275 omp_notice_variable (octx, decl, in_code);
7277 for (; octx; octx = octx->outer_context)
7279 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7280 break;
7281 splay_tree_node n2
7282 = splay_tree_lookup (octx->variables,
7283 (splay_tree_key) decl);
7284 if (n2)
7286 if (octx->region_type == ORT_ACC_HOST_DATA)
7287 error ("variable %qE declared in enclosing "
7288 "%<host_data%> region", DECL_NAME (decl));
7289 nflags |= GOVD_MAP;
7290 if (octx->region_type == ORT_ACC_DATA
7291 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7292 nflags |= GOVD_MAP_0LEN_ARRAY;
7293 goto found_outer;
7298 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7299 | GOVD_MAP_ALLOC_ONLY)) == flags)
7301 tree type = TREE_TYPE (decl);
7303 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7304 && lang_hooks.decls.omp_privatize_by_reference (decl))
7305 type = TREE_TYPE (type);
7306 if (!lang_hooks.types.omp_mappable_type (type))
7308 error ("%qD referenced in target region does not have "
7309 "a mappable type", decl);
7310 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7312 else
7314 if ((ctx->region_type & ORT_ACC) != 0)
7315 nflags = oacc_default_clause (ctx, decl, flags);
7316 else
7317 nflags |= GOVD_MAP;
7320 found_outer:
7321 omp_add_variable (ctx, decl, nflags);
7323 else
7325 /* If nothing changed, there's nothing left to do. */
7326 if ((n->value & flags) == flags)
7327 return ret;
7328 flags |= n->value;
7329 n->value = flags;
7331 goto do_outer;
7334 if (n == NULL)
7336 if (ctx->region_type == ORT_WORKSHARE
7337 || ctx->region_type == ORT_TASKGROUP
7338 || ctx->region_type == ORT_SIMD
7339 || ctx->region_type == ORT_ACC
7340 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7341 goto do_outer;
7343 flags = omp_default_clause (ctx, decl, in_code, flags);
7345 if ((flags & GOVD_PRIVATE)
7346 && lang_hooks.decls.omp_private_outer_ref (decl))
7347 flags |= GOVD_PRIVATE_OUTER_REF;
7349 omp_add_variable (ctx, decl, flags);
7351 shared = (flags & GOVD_SHARED) != 0;
7352 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7353 goto do_outer;
7356 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7357 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7358 && DECL_SIZE (decl))
7360 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7362 splay_tree_node n2;
7363 tree t = DECL_VALUE_EXPR (decl);
7364 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7365 t = TREE_OPERAND (t, 0);
7366 gcc_assert (DECL_P (t));
7367 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7368 n2->value |= GOVD_SEEN;
7370 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7371 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7372 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7373 != INTEGER_CST))
7375 splay_tree_node n2;
7376 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7377 gcc_assert (DECL_P (t));
7378 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7379 if (n2)
7380 omp_notice_variable (ctx, t, true);
7384 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7385 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7387 /* If nothing changed, there's nothing left to do. */
7388 if ((n->value & flags) == flags)
7389 return ret;
7390 flags |= n->value;
7391 n->value = flags;
7393 do_outer:
7394 /* If the variable is private in the current context, then we don't
7395 need to propagate anything to an outer context. */
7396 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7397 return ret;
7398 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7399 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7400 return ret;
7401 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7402 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7403 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7404 return ret;
7405 if (ctx->outer_context
7406 && omp_notice_variable (ctx->outer_context, decl, in_code))
7407 return true;
7408 return ret;
7411 /* Verify that DECL is private within CTX. If there's specific information
7412 to the contrary in the innermost scope, generate an error. */
7414 static bool
7415 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7417 splay_tree_node n;
7419 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7420 if (n != NULL)
7422 if (n->value & GOVD_SHARED)
7424 if (ctx == gimplify_omp_ctxp)
7426 if (simd)
7427 error ("iteration variable %qE is predetermined linear",
7428 DECL_NAME (decl));
7429 else
7430 error ("iteration variable %qE should be private",
7431 DECL_NAME (decl));
7432 n->value = GOVD_PRIVATE;
7433 return true;
7435 else
7436 return false;
7438 else if ((n->value & GOVD_EXPLICIT) != 0
7439 && (ctx == gimplify_omp_ctxp
7440 || (ctx->region_type == ORT_COMBINED_PARALLEL
7441 && gimplify_omp_ctxp->outer_context == ctx)))
7443 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7444 error ("iteration variable %qE should not be firstprivate",
7445 DECL_NAME (decl));
7446 else if ((n->value & GOVD_REDUCTION) != 0)
7447 error ("iteration variable %qE should not be reduction",
7448 DECL_NAME (decl));
7449 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7450 error ("iteration variable %qE should not be linear",
7451 DECL_NAME (decl));
7453 return (ctx == gimplify_omp_ctxp
7454 || (ctx->region_type == ORT_COMBINED_PARALLEL
7455 && gimplify_omp_ctxp->outer_context == ctx));
7458 if (ctx->region_type != ORT_WORKSHARE
7459 && ctx->region_type != ORT_TASKGROUP
7460 && ctx->region_type != ORT_SIMD
7461 && ctx->region_type != ORT_ACC)
7462 return false;
7463 else if (ctx->outer_context)
7464 return omp_is_private (ctx->outer_context, decl, simd);
7465 return false;
7468 /* Return true if DECL is private within a parallel region
7469 that binds to the current construct's context or in parallel
7470 region's REDUCTION clause. */
7472 static bool
7473 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7475 splay_tree_node n;
7479 ctx = ctx->outer_context;
7480 if (ctx == NULL)
7482 if (is_global_var (decl))
7483 return false;
7485 /* References might be private, but might be shared too,
7486 when checking for copyprivate, assume they might be
7487 private, otherwise assume they might be shared. */
7488 if (copyprivate)
7489 return true;
7491 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7492 return false;
7494 /* Treat C++ privatized non-static data members outside
7495 of the privatization the same. */
7496 if (omp_member_access_dummy_var (decl))
7497 return false;
7499 return true;
7502 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7504 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7505 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7506 continue;
7508 if (n != NULL)
7510 if ((n->value & GOVD_LOCAL) != 0
7511 && omp_member_access_dummy_var (decl))
7512 return false;
7513 return (n->value & GOVD_SHARED) == 0;
7516 while (ctx->region_type == ORT_WORKSHARE
7517 || ctx->region_type == ORT_TASKGROUP
7518 || ctx->region_type == ORT_SIMD
7519 || ctx->region_type == ORT_ACC);
7520 return false;
7523 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7525 static tree
7526 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7528 tree t = *tp;
7530 /* If this node has been visited, unmark it and keep looking. */
7531 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7532 return t;
7534 if (IS_TYPE_OR_DECL_P (t))
7535 *walk_subtrees = 0;
7536 return NULL_TREE;
7539 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7540 lower all the depend clauses by populating corresponding depend
7541 array. Returns 0 if there are no such depend clauses, or
7542 2 if all depend clauses should be removed, 1 otherwise. */
7544 static int
7545 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7547 tree c;
7548 gimple *g;
7549 size_t n[4] = { 0, 0, 0, 0 };
7550 bool unused[4];
7551 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7552 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7553 size_t i, j;
7554 location_t first_loc = UNKNOWN_LOCATION;
7556 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7557 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7559 switch (OMP_CLAUSE_DEPEND_KIND (c))
7561 case OMP_CLAUSE_DEPEND_IN:
7562 i = 2;
7563 break;
7564 case OMP_CLAUSE_DEPEND_OUT:
7565 case OMP_CLAUSE_DEPEND_INOUT:
7566 i = 0;
7567 break;
7568 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7569 i = 1;
7570 break;
7571 case OMP_CLAUSE_DEPEND_DEPOBJ:
7572 i = 3;
7573 break;
7574 case OMP_CLAUSE_DEPEND_SOURCE:
7575 case OMP_CLAUSE_DEPEND_SINK:
7576 continue;
7577 default:
7578 gcc_unreachable ();
7580 tree t = OMP_CLAUSE_DECL (c);
7581 if (first_loc == UNKNOWN_LOCATION)
7582 first_loc = OMP_CLAUSE_LOCATION (c);
7583 if (TREE_CODE (t) == TREE_LIST
7584 && TREE_PURPOSE (t)
7585 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7587 if (TREE_PURPOSE (t) != last_iter)
7589 tree tcnt = size_one_node;
7590 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7592 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7593 is_gimple_val, fb_rvalue) == GS_ERROR
7594 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7595 is_gimple_val, fb_rvalue) == GS_ERROR
7596 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7597 is_gimple_val, fb_rvalue) == GS_ERROR
7598 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7599 is_gimple_val, fb_rvalue)
7600 == GS_ERROR))
7601 return 2;
7602 tree var = TREE_VEC_ELT (it, 0);
7603 tree begin = TREE_VEC_ELT (it, 1);
7604 tree end = TREE_VEC_ELT (it, 2);
7605 tree step = TREE_VEC_ELT (it, 3);
7606 tree orig_step = TREE_VEC_ELT (it, 4);
7607 tree type = TREE_TYPE (var);
7608 tree stype = TREE_TYPE (step);
7609 location_t loc = DECL_SOURCE_LOCATION (var);
7610 tree endmbegin;
7611 /* Compute count for this iterator as
7612 orig_step > 0
7613 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7614 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7615 and compute product of those for the entire depend
7616 clause. */
7617 if (POINTER_TYPE_P (type))
7618 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7619 stype, end, begin);
7620 else
7621 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7622 end, begin);
7623 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7624 step,
7625 build_int_cst (stype, 1));
7626 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7627 build_int_cst (stype, 1));
7628 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7629 unshare_expr (endmbegin),
7630 stepm1);
7631 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7632 pos, step);
7633 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7634 endmbegin, stepp1);
7635 if (TYPE_UNSIGNED (stype))
7637 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7638 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7640 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7641 neg, step);
7642 step = NULL_TREE;
7643 tree cond = fold_build2_loc (loc, LT_EXPR,
7644 boolean_type_node,
7645 begin, end);
7646 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7647 build_int_cst (stype, 0));
7648 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7649 end, begin);
7650 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7651 build_int_cst (stype, 0));
7652 tree osteptype = TREE_TYPE (orig_step);
7653 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7654 orig_step,
7655 build_int_cst (osteptype, 0));
7656 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7657 cond, pos, neg);
7658 cnt = fold_convert_loc (loc, sizetype, cnt);
7659 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7660 fb_rvalue) == GS_ERROR)
7661 return 2;
7662 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7664 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7665 fb_rvalue) == GS_ERROR)
7666 return 2;
7667 last_iter = TREE_PURPOSE (t);
7668 last_count = tcnt;
7670 if (counts[i] == NULL_TREE)
7671 counts[i] = last_count;
7672 else
7673 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7674 PLUS_EXPR, counts[i], last_count);
7676 else
7677 n[i]++;
7679 for (i = 0; i < 4; i++)
7680 if (counts[i])
7681 break;
7682 if (i == 4)
7683 return 0;
7685 tree total = size_zero_node;
7686 for (i = 0; i < 4; i++)
7688 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7689 if (counts[i] == NULL_TREE)
7690 counts[i] = size_zero_node;
7691 if (n[i])
7692 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7693 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7694 fb_rvalue) == GS_ERROR)
7695 return 2;
7696 total = size_binop (PLUS_EXPR, total, counts[i]);
7699 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7700 == GS_ERROR)
7701 return 2;
7702 bool is_old = unused[1] && unused[3];
7703 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7704 size_int (is_old ? 1 : 4));
7705 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7706 tree array = create_tmp_var_raw (type);
7707 TREE_ADDRESSABLE (array) = 1;
7708 if (TREE_CODE (totalpx) != INTEGER_CST)
7710 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7711 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7712 if (gimplify_omp_ctxp)
7714 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7715 while (ctx
7716 && (ctx->region_type == ORT_WORKSHARE
7717 || ctx->region_type == ORT_TASKGROUP
7718 || ctx->region_type == ORT_SIMD
7719 || ctx->region_type == ORT_ACC))
7720 ctx = ctx->outer_context;
7721 if (ctx)
7722 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7724 gimplify_vla_decl (array, pre_p);
7726 else
7727 gimple_add_tmp_var (array);
7728 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7729 NULL_TREE);
7730 tree tem;
7731 if (!is_old)
7733 tem = build2 (MODIFY_EXPR, void_type_node, r,
7734 build_int_cst (ptr_type_node, 0));
7735 gimplify_and_add (tem, pre_p);
7736 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7737 NULL_TREE);
7739 tem = build2 (MODIFY_EXPR, void_type_node, r,
7740 fold_convert (ptr_type_node, total));
7741 gimplify_and_add (tem, pre_p);
7742 for (i = 1; i < (is_old ? 2 : 4); i++)
7744 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7745 NULL_TREE, NULL_TREE);
7746 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7747 gimplify_and_add (tem, pre_p);
7750 tree cnts[4];
7751 for (j = 4; j; j--)
7752 if (!unused[j - 1])
7753 break;
7754 for (i = 0; i < 4; i++)
7756 if (i && (i >= j || unused[i - 1]))
7758 cnts[i] = cnts[i - 1];
7759 continue;
7761 cnts[i] = create_tmp_var (sizetype);
7762 if (i == 0)
7763 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7764 else
7766 tree t;
7767 if (is_old)
7768 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7769 else
7770 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7771 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7772 == GS_ERROR)
7773 return 2;
7774 g = gimple_build_assign (cnts[i], t);
7776 gimple_seq_add_stmt (pre_p, g);
7779 last_iter = NULL_TREE;
7780 tree last_bind = NULL_TREE;
7781 tree *last_body = NULL;
7782 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7783 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7785 switch (OMP_CLAUSE_DEPEND_KIND (c))
7787 case OMP_CLAUSE_DEPEND_IN:
7788 i = 2;
7789 break;
7790 case OMP_CLAUSE_DEPEND_OUT:
7791 case OMP_CLAUSE_DEPEND_INOUT:
7792 i = 0;
7793 break;
7794 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7795 i = 1;
7796 break;
7797 case OMP_CLAUSE_DEPEND_DEPOBJ:
7798 i = 3;
7799 break;
7800 case OMP_CLAUSE_DEPEND_SOURCE:
7801 case OMP_CLAUSE_DEPEND_SINK:
7802 continue;
7803 default:
7804 gcc_unreachable ();
7806 tree t = OMP_CLAUSE_DECL (c);
7807 if (TREE_CODE (t) == TREE_LIST
7808 && TREE_PURPOSE (t)
7809 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7811 if (TREE_PURPOSE (t) != last_iter)
7813 if (last_bind)
7814 gimplify_and_add (last_bind, pre_p);
7815 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7816 last_bind = build3 (BIND_EXPR, void_type_node,
7817 BLOCK_VARS (block), NULL, block);
7818 TREE_SIDE_EFFECTS (last_bind) = 1;
7819 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7820 tree *p = &BIND_EXPR_BODY (last_bind);
7821 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7823 tree var = TREE_VEC_ELT (it, 0);
7824 tree begin = TREE_VEC_ELT (it, 1);
7825 tree end = TREE_VEC_ELT (it, 2);
7826 tree step = TREE_VEC_ELT (it, 3);
7827 tree orig_step = TREE_VEC_ELT (it, 4);
7828 tree type = TREE_TYPE (var);
7829 location_t loc = DECL_SOURCE_LOCATION (var);
7830 /* Emit:
7831 var = begin;
7832 goto cond_label;
7833 beg_label:
7835 var = var + step;
7836 cond_label:
7837 if (orig_step > 0) {
7838 if (var < end) goto beg_label;
7839 } else {
7840 if (var > end) goto beg_label;
7842 for each iterator, with inner iterators added to
7843 the ... above. */
7844 tree beg_label = create_artificial_label (loc);
7845 tree cond_label = NULL_TREE;
7846 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7847 var, begin);
7848 append_to_statement_list_force (tem, p);
7849 tem = build_and_jump (&cond_label);
7850 append_to_statement_list_force (tem, p);
7851 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7852 append_to_statement_list (tem, p);
7853 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7854 NULL_TREE, NULL_TREE);
7855 TREE_SIDE_EFFECTS (bind) = 1;
7856 SET_EXPR_LOCATION (bind, loc);
7857 append_to_statement_list_force (bind, p);
7858 if (POINTER_TYPE_P (type))
7859 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7860 var, fold_convert_loc (loc, sizetype,
7861 step));
7862 else
7863 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7864 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7865 var, tem);
7866 append_to_statement_list_force (tem, p);
7867 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
7868 append_to_statement_list (tem, p);
7869 tree cond = fold_build2_loc (loc, LT_EXPR,
7870 boolean_type_node,
7871 var, end);
7872 tree pos
7873 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7874 cond, build_and_jump (&beg_label),
7875 void_node);
7876 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7877 var, end);
7878 tree neg
7879 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7880 cond, build_and_jump (&beg_label),
7881 void_node);
7882 tree osteptype = TREE_TYPE (orig_step);
7883 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7884 orig_step,
7885 build_int_cst (osteptype, 0));
7886 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
7887 cond, pos, neg);
7888 append_to_statement_list_force (tem, p);
7889 p = &BIND_EXPR_BODY (bind);
7891 last_body = p;
7893 last_iter = TREE_PURPOSE (t);
7894 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7896 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
7897 0), last_body);
7898 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7900 if (error_operand_p (TREE_VALUE (t)))
7901 return 2;
7902 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
7903 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7904 NULL_TREE, NULL_TREE);
7905 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7906 void_type_node, r, TREE_VALUE (t));
7907 append_to_statement_list_force (tem, last_body);
7908 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7909 void_type_node, cnts[i],
7910 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
7911 append_to_statement_list_force (tem, last_body);
7912 TREE_VALUE (t) = null_pointer_node;
7914 else
7916 if (last_bind)
7918 gimplify_and_add (last_bind, pre_p);
7919 last_bind = NULL_TREE;
7921 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7923 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7924 NULL, is_gimple_val, fb_rvalue);
7925 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7927 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7928 return 2;
7929 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7930 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7931 is_gimple_val, fb_rvalue) == GS_ERROR)
7932 return 2;
7933 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7934 NULL_TREE, NULL_TREE);
7935 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
7936 gimplify_and_add (tem, pre_p);
7937 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
7938 size_int (1)));
7939 gimple_seq_add_stmt (pre_p, g);
7942 if (last_bind)
7943 gimplify_and_add (last_bind, pre_p);
7944 tree cond = boolean_false_node;
7945 if (is_old)
7947 if (!unused[0])
7948 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
7949 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
7950 size_int (2)));
7951 if (!unused[2])
7952 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
7953 build2_loc (first_loc, NE_EXPR, boolean_type_node,
7954 cnts[2],
7955 size_binop_loc (first_loc, PLUS_EXPR,
7956 totalpx,
7957 size_int (1))));
7959 else
7961 tree prev = size_int (5);
7962 for (i = 0; i < 4; i++)
7964 if (unused[i])
7965 continue;
7966 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
7967 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
7968 build2_loc (first_loc, NE_EXPR, boolean_type_node,
7969 cnts[i], unshare_expr (prev)));
7972 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
7973 build_call_expr_loc (first_loc,
7974 builtin_decl_explicit (BUILT_IN_TRAP),
7975 0), void_node);
7976 gimplify_and_add (tem, pre_p);
7977 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7978 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
7979 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7980 OMP_CLAUSE_CHAIN (c) = *list_p;
7981 *list_p = c;
7982 return 1;
7985 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7986 and previous omp contexts. */
7988 static void
7989 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7990 enum omp_region_type region_type,
7991 enum tree_code code)
7993 struct gimplify_omp_ctx *ctx, *outer_ctx;
7994 tree c;
7995 hash_map<tree, tree> *struct_map_to_clause = NULL;
7996 tree *prev_list_p = NULL;
7997 int handled_depend_iterators = -1;
7998 int nowait = -1;
8000 ctx = new_omp_context (region_type);
8001 outer_ctx = ctx->outer_context;
8002 if (code == OMP_TARGET)
8004 if (!lang_GNU_Fortran ())
8005 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8006 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8008 if (!lang_GNU_Fortran ())
8009 switch (code)
8011 case OMP_TARGET:
8012 case OMP_TARGET_DATA:
8013 case OMP_TARGET_ENTER_DATA:
8014 case OMP_TARGET_EXIT_DATA:
8015 case OACC_DECLARE:
8016 case OACC_HOST_DATA:
8017 case OACC_PARALLEL:
8018 case OACC_KERNELS:
8019 ctx->target_firstprivatize_array_bases = true;
8020 default:
8021 break;
8024 while ((c = *list_p) != NULL)
8026 bool remove = false;
8027 bool notice_outer = true;
8028 const char *check_non_private = NULL;
8029 unsigned int flags;
8030 tree decl;
8032 switch (OMP_CLAUSE_CODE (c))
8034 case OMP_CLAUSE_PRIVATE:
8035 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8036 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8038 flags |= GOVD_PRIVATE_OUTER_REF;
8039 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8041 else
8042 notice_outer = false;
8043 goto do_add;
8044 case OMP_CLAUSE_SHARED:
8045 flags = GOVD_SHARED | GOVD_EXPLICIT;
8046 goto do_add;
8047 case OMP_CLAUSE_FIRSTPRIVATE:
8048 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8049 check_non_private = "firstprivate";
8050 goto do_add;
8051 case OMP_CLAUSE_LASTPRIVATE:
8052 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8053 switch (code)
8055 case OMP_DISTRIBUTE:
8056 error_at (OMP_CLAUSE_LOCATION (c),
8057 "conditional %<lastprivate%> clause on "
8058 "%<distribute%> construct");
8059 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8060 break;
8061 case OMP_TASKLOOP:
8062 error_at (OMP_CLAUSE_LOCATION (c),
8063 "conditional %<lastprivate%> clause on "
8064 "%<taskloop%> construct");
8065 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8066 break;
8067 default:
8068 break;
8070 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8071 check_non_private = "lastprivate";
8072 decl = OMP_CLAUSE_DECL (c);
8073 if (error_operand_p (decl))
8074 goto do_add;
8075 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8076 && !lang_hooks.decls.omp_scalar_p (decl))
8078 error_at (OMP_CLAUSE_LOCATION (c),
8079 "non-scalar variable %qD in conditional "
8080 "%<lastprivate%> clause", decl);
8081 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8083 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8084 sorry_at (OMP_CLAUSE_LOCATION (c),
8085 "%<conditional%> modifier on %<lastprivate%> clause "
8086 "not supported yet");
8087 if (outer_ctx
8088 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8089 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8090 == ORT_COMBINED_TEAMS))
8091 && splay_tree_lookup (outer_ctx->variables,
8092 (splay_tree_key) decl) == NULL)
8094 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8095 if (outer_ctx->outer_context)
8096 omp_notice_variable (outer_ctx->outer_context, decl, true);
8098 else if (outer_ctx
8099 && (outer_ctx->region_type & ORT_TASK) != 0
8100 && outer_ctx->combined_loop
8101 && splay_tree_lookup (outer_ctx->variables,
8102 (splay_tree_key) decl) == NULL)
8104 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8105 if (outer_ctx->outer_context)
8106 omp_notice_variable (outer_ctx->outer_context, decl, true);
8108 else if (outer_ctx
8109 && (outer_ctx->region_type == ORT_WORKSHARE
8110 || outer_ctx->region_type == ORT_ACC)
8111 && outer_ctx->combined_loop
8112 && splay_tree_lookup (outer_ctx->variables,
8113 (splay_tree_key) decl) == NULL
8114 && !omp_check_private (outer_ctx, decl, false))
8116 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8117 if (outer_ctx->outer_context
8118 && (outer_ctx->outer_context->region_type
8119 == ORT_COMBINED_PARALLEL)
8120 && splay_tree_lookup (outer_ctx->outer_context->variables,
8121 (splay_tree_key) decl) == NULL)
8123 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8124 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8125 if (octx->outer_context)
8127 octx = octx->outer_context;
8128 if (octx->region_type == ORT_WORKSHARE
8129 && octx->combined_loop
8130 && splay_tree_lookup (octx->variables,
8131 (splay_tree_key) decl) == NULL
8132 && !omp_check_private (octx, decl, false))
8134 omp_add_variable (octx, decl,
8135 GOVD_LASTPRIVATE | GOVD_SEEN);
8136 octx = octx->outer_context;
8137 if (octx
8138 && ((octx->region_type & ORT_COMBINED_TEAMS)
8139 == ORT_COMBINED_TEAMS)
8140 && (splay_tree_lookup (octx->variables,
8141 (splay_tree_key) decl)
8142 == NULL))
8144 omp_add_variable (octx, decl,
8145 GOVD_SHARED | GOVD_SEEN);
8146 octx = octx->outer_context;
8149 if (octx)
8150 omp_notice_variable (octx, decl, true);
8153 else if (outer_ctx->outer_context)
8154 omp_notice_variable (outer_ctx->outer_context, decl, true);
8156 goto do_add;
8157 case OMP_CLAUSE_REDUCTION:
8158 if (OMP_CLAUSE_REDUCTION_TASK (c))
8160 if (region_type == ORT_WORKSHARE)
8162 if (nowait == -1)
8163 nowait = omp_find_clause (*list_p,
8164 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8165 if (nowait
8166 && (outer_ctx == NULL
8167 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8169 error_at (OMP_CLAUSE_LOCATION (c),
8170 "%<task%> reduction modifier on a construct "
8171 "with a %<nowait%> clause");
8172 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8175 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8177 error_at (OMP_CLAUSE_LOCATION (c),
8178 "invalid %<task%> reduction modifier on construct "
8179 "other than %<parallel%>, %<for%> or %<sections%>");
8180 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8183 /* FALLTHRU */
8184 case OMP_CLAUSE_IN_REDUCTION:
8185 case OMP_CLAUSE_TASK_REDUCTION:
8186 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8187 /* OpenACC permits reductions on private variables. */
8188 if (!(region_type & ORT_ACC)
8189 /* taskgroup is actually not a worksharing region. */
8190 && code != OMP_TASKGROUP)
8191 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8192 decl = OMP_CLAUSE_DECL (c);
8193 if (TREE_CODE (decl) == MEM_REF)
8195 tree type = TREE_TYPE (decl);
8196 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8197 NULL, is_gimple_val, fb_rvalue, false)
8198 == GS_ERROR)
8200 remove = true;
8201 break;
8203 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8204 if (DECL_P (v))
8206 omp_firstprivatize_variable (ctx, v);
8207 omp_notice_variable (ctx, v, true);
8209 decl = TREE_OPERAND (decl, 0);
8210 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8212 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8213 NULL, is_gimple_val, fb_rvalue, false)
8214 == GS_ERROR)
8216 remove = true;
8217 break;
8219 v = TREE_OPERAND (decl, 1);
8220 if (DECL_P (v))
8222 omp_firstprivatize_variable (ctx, v);
8223 omp_notice_variable (ctx, v, true);
8225 decl = TREE_OPERAND (decl, 0);
8227 if (TREE_CODE (decl) == ADDR_EXPR
8228 || TREE_CODE (decl) == INDIRECT_REF)
8229 decl = TREE_OPERAND (decl, 0);
8231 goto do_add_decl;
8232 case OMP_CLAUSE_LINEAR:
8233 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8234 is_gimple_val, fb_rvalue) == GS_ERROR)
8236 remove = true;
8237 break;
8239 else
8241 if (code == OMP_SIMD
8242 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8244 struct gimplify_omp_ctx *octx = outer_ctx;
8245 if (octx
8246 && octx->region_type == ORT_WORKSHARE
8247 && octx->combined_loop
8248 && !octx->distribute)
8250 if (octx->outer_context
8251 && (octx->outer_context->region_type
8252 == ORT_COMBINED_PARALLEL))
8253 octx = octx->outer_context->outer_context;
8254 else
8255 octx = octx->outer_context;
8257 if (octx
8258 && octx->region_type == ORT_WORKSHARE
8259 && octx->combined_loop
8260 && octx->distribute)
8262 error_at (OMP_CLAUSE_LOCATION (c),
8263 "%<linear%> clause for variable other than "
8264 "loop iterator specified on construct "
8265 "combined with %<distribute%>");
8266 remove = true;
8267 break;
8270 /* For combined #pragma omp parallel for simd, need to put
8271 lastprivate and perhaps firstprivate too on the
8272 parallel. Similarly for #pragma omp for simd. */
8273 struct gimplify_omp_ctx *octx = outer_ctx;
8274 decl = NULL_TREE;
8277 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8278 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8279 break;
8280 decl = OMP_CLAUSE_DECL (c);
8281 if (error_operand_p (decl))
8283 decl = NULL_TREE;
8284 break;
8286 flags = GOVD_SEEN;
8287 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8288 flags |= GOVD_FIRSTPRIVATE;
8289 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8290 flags |= GOVD_LASTPRIVATE;
8291 if (octx
8292 && octx->region_type == ORT_WORKSHARE
8293 && octx->combined_loop)
8295 if (octx->outer_context
8296 && (octx->outer_context->region_type
8297 == ORT_COMBINED_PARALLEL))
8298 octx = octx->outer_context;
8299 else if (omp_check_private (octx, decl, false))
8300 break;
8302 else if (octx
8303 && (octx->region_type & ORT_TASK) != 0
8304 && octx->combined_loop)
8306 else if (octx
8307 && octx->region_type == ORT_COMBINED_PARALLEL
8308 && ctx->region_type == ORT_WORKSHARE
8309 && octx == outer_ctx)
8310 flags = GOVD_SEEN | GOVD_SHARED;
8311 else if (octx
8312 && ((octx->region_type & ORT_COMBINED_TEAMS)
8313 == ORT_COMBINED_TEAMS))
8314 flags = GOVD_SEEN | GOVD_SHARED;
8315 else if (octx
8316 && octx->region_type == ORT_COMBINED_TARGET)
8318 flags &= ~GOVD_LASTPRIVATE;
8319 if (flags == GOVD_SEEN)
8320 break;
8322 else
8323 break;
8324 splay_tree_node on
8325 = splay_tree_lookup (octx->variables,
8326 (splay_tree_key) decl);
8327 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8329 octx = NULL;
8330 break;
8332 omp_add_variable (octx, decl, flags);
8333 if (octx->outer_context == NULL)
8334 break;
8335 octx = octx->outer_context;
8337 while (1);
8338 if (octx
8339 && decl
8340 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8341 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8342 omp_notice_variable (octx, decl, true);
8344 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8345 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8346 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8348 notice_outer = false;
8349 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8351 goto do_add;
8353 case OMP_CLAUSE_MAP:
8354 decl = OMP_CLAUSE_DECL (c);
8355 if (error_operand_p (decl))
8356 remove = true;
8357 switch (code)
8359 case OMP_TARGET:
8360 break;
8361 case OACC_DATA:
8362 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8363 break;
8364 /* FALLTHRU */
8365 case OMP_TARGET_DATA:
8366 case OMP_TARGET_ENTER_DATA:
8367 case OMP_TARGET_EXIT_DATA:
8368 case OACC_ENTER_DATA:
8369 case OACC_EXIT_DATA:
8370 case OACC_HOST_DATA:
8371 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8372 || (OMP_CLAUSE_MAP_KIND (c)
8373 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8374 /* For target {,enter ,exit }data only the array slice is
8375 mapped, but not the pointer to it. */
8376 remove = true;
8377 break;
8378 default:
8379 break;
8381 if (remove)
8382 break;
8383 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8385 struct gimplify_omp_ctx *octx;
8386 for (octx = outer_ctx; octx; octx = octx->outer_context)
8388 if (octx->region_type != ORT_ACC_HOST_DATA)
8389 break;
8390 splay_tree_node n2
8391 = splay_tree_lookup (octx->variables,
8392 (splay_tree_key) decl);
8393 if (n2)
8394 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8395 "declared in enclosing %<host_data%> region",
8396 DECL_NAME (decl));
8399 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8400 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8401 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8402 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8403 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8405 remove = true;
8406 break;
8408 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8409 || (OMP_CLAUSE_MAP_KIND (c)
8410 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8411 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8413 OMP_CLAUSE_SIZE (c)
8414 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8415 false);
8416 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8417 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8419 if (!DECL_P (decl))
8421 tree d = decl, *pd;
8422 if (TREE_CODE (d) == ARRAY_REF)
8424 while (TREE_CODE (d) == ARRAY_REF)
8425 d = TREE_OPERAND (d, 0);
8426 if (TREE_CODE (d) == COMPONENT_REF
8427 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8428 decl = d;
8430 pd = &OMP_CLAUSE_DECL (c);
8431 if (d == decl
8432 && TREE_CODE (decl) == INDIRECT_REF
8433 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8434 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8435 == REFERENCE_TYPE))
8437 pd = &TREE_OPERAND (decl, 0);
8438 decl = TREE_OPERAND (decl, 0);
8440 if (TREE_CODE (decl) == COMPONENT_REF)
8442 while (TREE_CODE (decl) == COMPONENT_REF)
8443 decl = TREE_OPERAND (decl, 0);
8444 if (TREE_CODE (decl) == INDIRECT_REF
8445 && DECL_P (TREE_OPERAND (decl, 0))
8446 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8447 == REFERENCE_TYPE))
8448 decl = TREE_OPERAND (decl, 0);
8450 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8451 == GS_ERROR)
8453 remove = true;
8454 break;
8456 if (DECL_P (decl))
8458 if (error_operand_p (decl))
8460 remove = true;
8461 break;
8464 tree stype = TREE_TYPE (decl);
8465 if (TREE_CODE (stype) == REFERENCE_TYPE)
8466 stype = TREE_TYPE (stype);
8467 if (TYPE_SIZE_UNIT (stype) == NULL
8468 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8470 error_at (OMP_CLAUSE_LOCATION (c),
8471 "mapping field %qE of variable length "
8472 "structure", OMP_CLAUSE_DECL (c));
8473 remove = true;
8474 break;
8477 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8479 /* Error recovery. */
8480 if (prev_list_p == NULL)
8482 remove = true;
8483 break;
8485 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8487 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8488 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8490 remove = true;
8491 break;
8496 tree offset;
8497 poly_int64 bitsize, bitpos;
8498 machine_mode mode;
8499 int unsignedp, reversep, volatilep = 0;
8500 tree base = OMP_CLAUSE_DECL (c);
8501 while (TREE_CODE (base) == ARRAY_REF)
8502 base = TREE_OPERAND (base, 0);
8503 if (TREE_CODE (base) == INDIRECT_REF)
8504 base = TREE_OPERAND (base, 0);
8505 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8506 &mode, &unsignedp, &reversep,
8507 &volatilep);
8508 tree orig_base = base;
8509 if ((TREE_CODE (base) == INDIRECT_REF
8510 || (TREE_CODE (base) == MEM_REF
8511 && integer_zerop (TREE_OPERAND (base, 1))))
8512 && DECL_P (TREE_OPERAND (base, 0))
8513 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8514 == REFERENCE_TYPE))
8515 base = TREE_OPERAND (base, 0);
8516 gcc_assert (base == decl
8517 && (offset == NULL_TREE
8518 || poly_int_tree_p (offset)));
8520 splay_tree_node n
8521 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8522 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8523 == GOMP_MAP_ALWAYS_POINTER);
8524 if (n == NULL || (n->value & GOVD_MAP) == 0)
8526 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8527 OMP_CLAUSE_MAP);
8528 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8529 if (orig_base != base)
8530 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8531 else
8532 OMP_CLAUSE_DECL (l) = decl;
8533 OMP_CLAUSE_SIZE (l) = size_int (1);
8534 if (struct_map_to_clause == NULL)
8535 struct_map_to_clause = new hash_map<tree, tree>;
8536 struct_map_to_clause->put (decl, l);
8537 if (ptr)
8539 enum gomp_map_kind mkind
8540 = code == OMP_TARGET_EXIT_DATA
8541 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8542 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8543 OMP_CLAUSE_MAP);
8544 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8545 OMP_CLAUSE_DECL (c2)
8546 = unshare_expr (OMP_CLAUSE_DECL (c));
8547 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8548 OMP_CLAUSE_SIZE (c2)
8549 = TYPE_SIZE_UNIT (ptr_type_node);
8550 OMP_CLAUSE_CHAIN (l) = c2;
8551 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8553 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8554 tree c3
8555 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8556 OMP_CLAUSE_MAP);
8557 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8558 OMP_CLAUSE_DECL (c3)
8559 = unshare_expr (OMP_CLAUSE_DECL (c4));
8560 OMP_CLAUSE_SIZE (c3)
8561 = TYPE_SIZE_UNIT (ptr_type_node);
8562 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8563 OMP_CLAUSE_CHAIN (c2) = c3;
8565 *prev_list_p = l;
8566 prev_list_p = NULL;
8568 else
8570 OMP_CLAUSE_CHAIN (l) = c;
8571 *list_p = l;
8572 list_p = &OMP_CLAUSE_CHAIN (l);
8574 if (orig_base != base && code == OMP_TARGET)
8576 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8577 OMP_CLAUSE_MAP);
8578 enum gomp_map_kind mkind
8579 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8580 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8581 OMP_CLAUSE_DECL (c2) = decl;
8582 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8583 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8584 OMP_CLAUSE_CHAIN (l) = c2;
8586 flags = GOVD_MAP | GOVD_EXPLICIT;
8587 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8588 flags |= GOVD_SEEN;
8589 goto do_add_decl;
8591 else
8593 tree *osc = struct_map_to_clause->get (decl);
8594 tree *sc = NULL, *scp = NULL;
8595 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8596 n->value |= GOVD_SEEN;
8597 poly_offset_int o1, o2;
8598 if (offset)
8599 o1 = wi::to_poly_offset (offset);
8600 else
8601 o1 = 0;
8602 if (maybe_ne (bitpos, 0))
8603 o1 += bits_to_bytes_round_down (bitpos);
8604 sc = &OMP_CLAUSE_CHAIN (*osc);
8605 if (*sc != c
8606 && (OMP_CLAUSE_MAP_KIND (*sc)
8607 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8608 sc = &OMP_CLAUSE_CHAIN (*sc);
8609 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8610 if (ptr && sc == prev_list_p)
8611 break;
8612 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8613 != COMPONENT_REF
8614 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8615 != INDIRECT_REF)
8616 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8617 != ARRAY_REF))
8618 break;
8619 else
8621 tree offset2;
8622 poly_int64 bitsize2, bitpos2;
8623 base = OMP_CLAUSE_DECL (*sc);
8624 if (TREE_CODE (base) == ARRAY_REF)
8626 while (TREE_CODE (base) == ARRAY_REF)
8627 base = TREE_OPERAND (base, 0);
8628 if (TREE_CODE (base) != COMPONENT_REF
8629 || (TREE_CODE (TREE_TYPE (base))
8630 != ARRAY_TYPE))
8631 break;
8633 else if (TREE_CODE (base) == INDIRECT_REF
8634 && (TREE_CODE (TREE_OPERAND (base, 0))
8635 == COMPONENT_REF)
8636 && (TREE_CODE (TREE_TYPE
8637 (TREE_OPERAND (base, 0)))
8638 == REFERENCE_TYPE))
8639 base = TREE_OPERAND (base, 0);
8640 base = get_inner_reference (base, &bitsize2,
8641 &bitpos2, &offset2,
8642 &mode, &unsignedp,
8643 &reversep, &volatilep);
8644 if ((TREE_CODE (base) == INDIRECT_REF
8645 || (TREE_CODE (base) == MEM_REF
8646 && integer_zerop (TREE_OPERAND (base,
8647 1))))
8648 && DECL_P (TREE_OPERAND (base, 0))
8649 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8650 0)))
8651 == REFERENCE_TYPE))
8652 base = TREE_OPERAND (base, 0);
8653 if (base != decl)
8654 break;
8655 if (scp)
8656 continue;
8657 gcc_assert (offset == NULL_TREE
8658 || poly_int_tree_p (offset));
8659 tree d1 = OMP_CLAUSE_DECL (*sc);
8660 tree d2 = OMP_CLAUSE_DECL (c);
8661 while (TREE_CODE (d1) == ARRAY_REF)
8662 d1 = TREE_OPERAND (d1, 0);
8663 while (TREE_CODE (d2) == ARRAY_REF)
8664 d2 = TREE_OPERAND (d2, 0);
8665 if (TREE_CODE (d1) == INDIRECT_REF)
8666 d1 = TREE_OPERAND (d1, 0);
8667 if (TREE_CODE (d2) == INDIRECT_REF)
8668 d2 = TREE_OPERAND (d2, 0);
8669 while (TREE_CODE (d1) == COMPONENT_REF)
8670 if (TREE_CODE (d2) == COMPONENT_REF
8671 && TREE_OPERAND (d1, 1)
8672 == TREE_OPERAND (d2, 1))
8674 d1 = TREE_OPERAND (d1, 0);
8675 d2 = TREE_OPERAND (d2, 0);
8677 else
8678 break;
8679 if (d1 == d2)
8681 error_at (OMP_CLAUSE_LOCATION (c),
8682 "%qE appears more than once in map "
8683 "clauses", OMP_CLAUSE_DECL (c));
8684 remove = true;
8685 break;
8687 if (offset2)
8688 o2 = wi::to_poly_offset (offset2);
8689 else
8690 o2 = 0;
8691 o2 += bits_to_bytes_round_down (bitpos2);
8692 if (maybe_lt (o1, o2)
8693 || (known_eq (o1, 2)
8694 && maybe_lt (bitpos, bitpos2)))
8696 if (ptr)
8697 scp = sc;
8698 else
8699 break;
8702 if (remove)
8703 break;
8704 OMP_CLAUSE_SIZE (*osc)
8705 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8706 size_one_node);
8707 if (ptr)
8709 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8710 OMP_CLAUSE_MAP);
8711 tree cl = NULL_TREE;
8712 enum gomp_map_kind mkind
8713 = code == OMP_TARGET_EXIT_DATA
8714 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8715 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8716 OMP_CLAUSE_DECL (c2)
8717 = unshare_expr (OMP_CLAUSE_DECL (c));
8718 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8719 OMP_CLAUSE_SIZE (c2)
8720 = TYPE_SIZE_UNIT (ptr_type_node);
8721 cl = scp ? *prev_list_p : c2;
8722 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8724 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8725 tree c3
8726 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8727 OMP_CLAUSE_MAP);
8728 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8729 OMP_CLAUSE_DECL (c3)
8730 = unshare_expr (OMP_CLAUSE_DECL (c4));
8731 OMP_CLAUSE_SIZE (c3)
8732 = TYPE_SIZE_UNIT (ptr_type_node);
8733 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8734 if (!scp)
8735 OMP_CLAUSE_CHAIN (c2) = c3;
8736 else
8737 cl = c3;
8739 if (scp)
8740 *scp = c2;
8741 if (sc == prev_list_p)
8743 *sc = cl;
8744 prev_list_p = NULL;
8746 else
8748 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8749 list_p = prev_list_p;
8750 prev_list_p = NULL;
8751 OMP_CLAUSE_CHAIN (c) = *sc;
8752 *sc = cl;
8753 continue;
8756 else if (*sc != c)
8758 *list_p = OMP_CLAUSE_CHAIN (c);
8759 OMP_CLAUSE_CHAIN (c) = *sc;
8760 *sc = c;
8761 continue;
8765 if (!remove
8766 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8767 && OMP_CLAUSE_CHAIN (c)
8768 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8769 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8770 == GOMP_MAP_ALWAYS_POINTER))
8771 prev_list_p = list_p;
8772 break;
8774 flags = GOVD_MAP | GOVD_EXPLICIT;
8775 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8776 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8777 flags |= GOVD_MAP_ALWAYS_TO;
8778 goto do_add;
8780 case OMP_CLAUSE_DEPEND:
8781 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8783 tree deps = OMP_CLAUSE_DECL (c);
8784 while (deps && TREE_CODE (deps) == TREE_LIST)
8786 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8787 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8788 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8789 pre_p, NULL, is_gimple_val, fb_rvalue);
8790 deps = TREE_CHAIN (deps);
8792 break;
8794 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8795 break;
8796 if (handled_depend_iterators == -1)
8797 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8798 if (handled_depend_iterators)
8800 if (handled_depend_iterators == 2)
8801 remove = true;
8802 break;
8804 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8806 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8807 NULL, is_gimple_val, fb_rvalue);
8808 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8810 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8812 remove = true;
8813 break;
8815 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8816 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8817 is_gimple_val, fb_rvalue) == GS_ERROR)
8819 remove = true;
8820 break;
8822 break;
8824 case OMP_CLAUSE_TO:
8825 case OMP_CLAUSE_FROM:
8826 case OMP_CLAUSE__CACHE_:
8827 decl = OMP_CLAUSE_DECL (c);
8828 if (error_operand_p (decl))
8830 remove = true;
8831 break;
8833 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8834 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8835 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8836 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8837 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8839 remove = true;
8840 break;
8842 if (!DECL_P (decl))
8844 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8845 NULL, is_gimple_lvalue, fb_lvalue)
8846 == GS_ERROR)
8848 remove = true;
8849 break;
8851 break;
8853 goto do_notice;
8855 case OMP_CLAUSE_USE_DEVICE_PTR:
8856 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8857 goto do_add;
8858 case OMP_CLAUSE_IS_DEVICE_PTR:
8859 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8860 goto do_add;
8862 do_add:
8863 decl = OMP_CLAUSE_DECL (c);
8864 do_add_decl:
8865 if (error_operand_p (decl))
8867 remove = true;
8868 break;
8870 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8872 tree t = omp_member_access_dummy_var (decl);
8873 if (t)
8875 tree v = DECL_VALUE_EXPR (decl);
8876 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8877 if (outer_ctx)
8878 omp_notice_variable (outer_ctx, t, true);
8881 if (code == OACC_DATA
8882 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8883 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8884 flags |= GOVD_MAP_0LEN_ARRAY;
8885 omp_add_variable (ctx, decl, flags);
8886 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8887 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
8888 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8889 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8891 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8892 GOVD_LOCAL | GOVD_SEEN);
8893 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8894 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8895 find_decl_expr,
8896 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8897 NULL) == NULL_TREE)
8898 omp_add_variable (ctx,
8899 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8900 GOVD_LOCAL | GOVD_SEEN);
8901 gimplify_omp_ctxp = ctx;
8902 push_gimplify_context ();
8904 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8905 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8907 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8908 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8909 pop_gimplify_context
8910 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8911 push_gimplify_context ();
8912 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8913 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8914 pop_gimplify_context
8915 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8916 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8917 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8919 gimplify_omp_ctxp = outer_ctx;
8921 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8922 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8924 gimplify_omp_ctxp = ctx;
8925 push_gimplify_context ();
8926 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8928 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8929 NULL, NULL);
8930 TREE_SIDE_EFFECTS (bind) = 1;
8931 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8932 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8934 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8935 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8936 pop_gimplify_context
8937 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8938 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8940 gimplify_omp_ctxp = outer_ctx;
8942 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8943 && OMP_CLAUSE_LINEAR_STMT (c))
8945 gimplify_omp_ctxp = ctx;
8946 push_gimplify_context ();
8947 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8949 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8950 NULL, NULL);
8951 TREE_SIDE_EFFECTS (bind) = 1;
8952 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8953 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8955 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8956 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8957 pop_gimplify_context
8958 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8959 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8961 gimplify_omp_ctxp = outer_ctx;
8963 if (notice_outer)
8964 goto do_notice;
8965 break;
8967 case OMP_CLAUSE_COPYIN:
8968 case OMP_CLAUSE_COPYPRIVATE:
8969 decl = OMP_CLAUSE_DECL (c);
8970 if (error_operand_p (decl))
8972 remove = true;
8973 break;
8975 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8976 && !remove
8977 && !omp_check_private (ctx, decl, true))
8979 remove = true;
8980 if (is_global_var (decl))
8982 if (DECL_THREAD_LOCAL_P (decl))
8983 remove = false;
8984 else if (DECL_HAS_VALUE_EXPR_P (decl))
8986 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8988 if (value
8989 && DECL_P (value)
8990 && DECL_THREAD_LOCAL_P (value))
8991 remove = false;
8994 if (remove)
8995 error_at (OMP_CLAUSE_LOCATION (c),
8996 "copyprivate variable %qE is not threadprivate"
8997 " or private in outer context", DECL_NAME (decl));
8999 do_notice:
9000 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9001 && outer_ctx
9002 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
9003 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9004 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9005 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE))
9007 splay_tree_node on
9008 = splay_tree_lookup (outer_ctx->variables,
9009 (splay_tree_key)decl);
9010 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9012 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9013 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9014 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9015 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9016 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9017 == POINTER_TYPE))))
9018 omp_firstprivatize_variable (outer_ctx, decl);
9019 else
9020 omp_add_variable (outer_ctx, decl,
9021 GOVD_SEEN | GOVD_SHARED);
9022 omp_notice_variable (outer_ctx, decl, true);
9025 if (outer_ctx)
9026 omp_notice_variable (outer_ctx, decl, true);
9027 if (check_non_private
9028 && region_type == ORT_WORKSHARE
9029 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9030 || decl == OMP_CLAUSE_DECL (c)
9031 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9032 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9033 == ADDR_EXPR
9034 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9035 == POINTER_PLUS_EXPR
9036 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9037 (OMP_CLAUSE_DECL (c), 0), 0))
9038 == ADDR_EXPR)))))
9039 && omp_check_private (ctx, decl, false))
9041 error ("%s variable %qE is private in outer context",
9042 check_non_private, DECL_NAME (decl));
9043 remove = true;
9045 break;
9047 case OMP_CLAUSE_IF:
9048 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9049 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9051 const char *p[2];
9052 for (int i = 0; i < 2; i++)
9053 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9055 case VOID_CST: p[i] = "cancel"; break;
9056 case OMP_PARALLEL: p[i] = "parallel"; break;
9057 case OMP_SIMD: p[i] = "simd"; break;
9058 case OMP_TASK: p[i] = "task"; break;
9059 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9060 case OMP_TARGET_DATA: p[i] = "target data"; break;
9061 case OMP_TARGET: p[i] = "target"; break;
9062 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9063 case OMP_TARGET_ENTER_DATA:
9064 p[i] = "target enter data"; break;
9065 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9066 default: gcc_unreachable ();
9068 error_at (OMP_CLAUSE_LOCATION (c),
9069 "expected %qs %<if%> clause modifier rather than %qs",
9070 p[0], p[1]);
9071 remove = true;
9073 /* Fall through. */
9075 case OMP_CLAUSE_FINAL:
9076 OMP_CLAUSE_OPERAND (c, 0)
9077 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9078 /* Fall through. */
9080 case OMP_CLAUSE_SCHEDULE:
9081 case OMP_CLAUSE_NUM_THREADS:
9082 case OMP_CLAUSE_NUM_TEAMS:
9083 case OMP_CLAUSE_THREAD_LIMIT:
9084 case OMP_CLAUSE_DIST_SCHEDULE:
9085 case OMP_CLAUSE_DEVICE:
9086 case OMP_CLAUSE_PRIORITY:
9087 case OMP_CLAUSE_GRAINSIZE:
9088 case OMP_CLAUSE_NUM_TASKS:
9089 case OMP_CLAUSE_HINT:
9090 case OMP_CLAUSE_ASYNC:
9091 case OMP_CLAUSE_WAIT:
9092 case OMP_CLAUSE_NUM_GANGS:
9093 case OMP_CLAUSE_NUM_WORKERS:
9094 case OMP_CLAUSE_VECTOR_LENGTH:
9095 case OMP_CLAUSE_WORKER:
9096 case OMP_CLAUSE_VECTOR:
9097 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9098 is_gimple_val, fb_rvalue) == GS_ERROR)
9099 remove = true;
9100 break;
9102 case OMP_CLAUSE_GANG:
9103 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9104 is_gimple_val, fb_rvalue) == GS_ERROR)
9105 remove = true;
9106 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9107 is_gimple_val, fb_rvalue) == GS_ERROR)
9108 remove = true;
9109 break;
9111 case OMP_CLAUSE_NOWAIT:
9112 nowait = 1;
9113 break;
9115 case OMP_CLAUSE_ORDERED:
9116 case OMP_CLAUSE_UNTIED:
9117 case OMP_CLAUSE_COLLAPSE:
9118 case OMP_CLAUSE_TILE:
9119 case OMP_CLAUSE_AUTO:
9120 case OMP_CLAUSE_SEQ:
9121 case OMP_CLAUSE_INDEPENDENT:
9122 case OMP_CLAUSE_MERGEABLE:
9123 case OMP_CLAUSE_PROC_BIND:
9124 case OMP_CLAUSE_SAFELEN:
9125 case OMP_CLAUSE_SIMDLEN:
9126 case OMP_CLAUSE_NOGROUP:
9127 case OMP_CLAUSE_THREADS:
9128 case OMP_CLAUSE_SIMD:
9129 case OMP_CLAUSE_IF_PRESENT:
9130 case OMP_CLAUSE_FINALIZE:
9131 break;
9133 case OMP_CLAUSE_DEFAULTMAP:
9134 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9135 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9137 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9138 gdmkmin = GDMK_SCALAR;
9139 gdmkmax = GDMK_POINTER;
9140 break;
9141 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9142 gdmkmin = gdmkmax = GDMK_SCALAR;
9143 break;
9144 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9145 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9146 break;
9147 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9148 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9149 break;
9150 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9151 gdmkmin = gdmkmax = GDMK_POINTER;
9152 break;
9153 default:
9154 gcc_unreachable ();
9156 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9157 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9159 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9160 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9161 break;
9162 case OMP_CLAUSE_DEFAULTMAP_TO:
9163 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9164 break;
9165 case OMP_CLAUSE_DEFAULTMAP_FROM:
9166 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9167 break;
9168 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9169 ctx->defaultmap[gdmk] = GOVD_MAP;
9170 break;
9171 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9172 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9173 break;
9174 case OMP_CLAUSE_DEFAULTMAP_NONE:
9175 ctx->defaultmap[gdmk] = 0;
9176 break;
9177 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9178 switch (gdmk)
9180 case GDMK_SCALAR:
9181 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9182 break;
9183 case GDMK_AGGREGATE:
9184 case GDMK_ALLOCATABLE:
9185 ctx->defaultmap[gdmk] = GOVD_MAP;
9186 break;
9187 case GDMK_POINTER:
9188 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9189 break;
9190 default:
9191 gcc_unreachable ();
9193 break;
9194 default:
9195 gcc_unreachable ();
9197 break;
9199 case OMP_CLAUSE_ALIGNED:
9200 decl = OMP_CLAUSE_DECL (c);
9201 if (error_operand_p (decl))
9203 remove = true;
9204 break;
9206 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9207 is_gimple_val, fb_rvalue) == GS_ERROR)
9209 remove = true;
9210 break;
9212 if (!is_global_var (decl)
9213 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9214 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9215 break;
9217 case OMP_CLAUSE_NONTEMPORAL:
9218 decl = OMP_CLAUSE_DECL (c);
9219 if (error_operand_p (decl))
9221 remove = true;
9222 break;
9224 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9225 break;
9227 case OMP_CLAUSE_DEFAULT:
9228 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9229 break;
9231 default:
9232 gcc_unreachable ();
9235 if (code == OACC_DATA
9236 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9237 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9238 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9239 remove = true;
9240 if (remove)
9241 *list_p = OMP_CLAUSE_CHAIN (c);
9242 else
9243 list_p = &OMP_CLAUSE_CHAIN (c);
9246 gimplify_omp_ctxp = ctx;
9247 if (struct_map_to_clause)
9248 delete struct_map_to_clause;
9251 /* Return true if DECL is a candidate for shared to firstprivate
9252 optimization. We only consider non-addressable scalars, not
9253 too big, and not references. */
9255 static bool
9256 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9258 if (TREE_ADDRESSABLE (decl))
9259 return false;
9260 tree type = TREE_TYPE (decl);
9261 if (!is_gimple_reg_type (type)
9262 || TREE_CODE (type) == REFERENCE_TYPE
9263 || TREE_ADDRESSABLE (type))
9264 return false;
9265 /* Don't optimize too large decls, as each thread/task will have
9266 its own. */
9267 HOST_WIDE_INT len = int_size_in_bytes (type);
9268 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9269 return false;
9270 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9271 return false;
9272 return true;
9275 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9276 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9277 GOVD_WRITTEN in outer contexts. */
9279 static void
9280 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9282 for (; ctx; ctx = ctx->outer_context)
9284 splay_tree_node n = splay_tree_lookup (ctx->variables,
9285 (splay_tree_key) decl);
9286 if (n == NULL)
9287 continue;
9288 else if (n->value & GOVD_SHARED)
9290 n->value |= GOVD_WRITTEN;
9291 return;
9293 else if (n->value & GOVD_DATA_SHARE_CLASS)
9294 return;
9298 /* Helper callback for walk_gimple_seq to discover possible stores
9299 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9300 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9301 for those. */
9303 static tree
9304 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9306 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9308 *walk_subtrees = 0;
9309 if (!wi->is_lhs)
9310 return NULL_TREE;
9312 tree op = *tp;
9315 if (handled_component_p (op))
9316 op = TREE_OPERAND (op, 0);
9317 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9318 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9319 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9320 else
9321 break;
9323 while (1);
9324 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9325 return NULL_TREE;
9327 omp_mark_stores (gimplify_omp_ctxp, op);
9328 return NULL_TREE;
9331 /* Helper callback for walk_gimple_seq to discover possible stores
9332 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9333 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9334 for those. */
9336 static tree
9337 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9338 bool *handled_ops_p,
9339 struct walk_stmt_info *wi)
9341 gimple *stmt = gsi_stmt (*gsi_p);
9342 switch (gimple_code (stmt))
9344 /* Don't recurse on OpenMP constructs for which
9345 gimplify_adjust_omp_clauses already handled the bodies,
9346 except handle gimple_omp_for_pre_body. */
9347 case GIMPLE_OMP_FOR:
9348 *handled_ops_p = true;
9349 if (gimple_omp_for_pre_body (stmt))
9350 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9351 omp_find_stores_stmt, omp_find_stores_op, wi);
9352 break;
9353 case GIMPLE_OMP_PARALLEL:
9354 case GIMPLE_OMP_TASK:
9355 case GIMPLE_OMP_SECTIONS:
9356 case GIMPLE_OMP_SINGLE:
9357 case GIMPLE_OMP_TARGET:
9358 case GIMPLE_OMP_TEAMS:
9359 case GIMPLE_OMP_CRITICAL:
9360 *handled_ops_p = true;
9361 break;
9362 default:
9363 break;
9365 return NULL_TREE;
9368 struct gimplify_adjust_omp_clauses_data
9370 tree *list_p;
9371 gimple_seq *pre_p;
9374 /* For all variables that were not actually used within the context,
9375 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9377 static int
9378 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9380 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9381 gimple_seq *pre_p
9382 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9383 tree decl = (tree) n->key;
9384 unsigned flags = n->value;
9385 enum omp_clause_code code;
9386 tree clause;
9387 bool private_debug;
9389 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9390 return 0;
9391 if ((flags & GOVD_SEEN) == 0)
9392 return 0;
9393 if (flags & GOVD_DEBUG_PRIVATE)
9395 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9396 private_debug = true;
9398 else if (flags & GOVD_MAP)
9399 private_debug = false;
9400 else
9401 private_debug
9402 = lang_hooks.decls.omp_private_debug_clause (decl,
9403 !!(flags & GOVD_SHARED));
9404 if (private_debug)
9405 code = OMP_CLAUSE_PRIVATE;
9406 else if (flags & GOVD_MAP)
9408 code = OMP_CLAUSE_MAP;
9409 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9410 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9412 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9413 return 0;
9416 else if (flags & GOVD_SHARED)
9418 if (is_global_var (decl))
9420 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9421 while (ctx != NULL)
9423 splay_tree_node on
9424 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9425 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9426 | GOVD_PRIVATE | GOVD_REDUCTION
9427 | GOVD_LINEAR | GOVD_MAP)) != 0)
9428 break;
9429 ctx = ctx->outer_context;
9431 if (ctx == NULL)
9432 return 0;
9434 code = OMP_CLAUSE_SHARED;
9436 else if (flags & GOVD_PRIVATE)
9437 code = OMP_CLAUSE_PRIVATE;
9438 else if (flags & GOVD_FIRSTPRIVATE)
9440 code = OMP_CLAUSE_FIRSTPRIVATE;
9441 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9442 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9443 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9445 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9446 "%<target%> construct", decl);
9447 return 0;
9450 else if (flags & GOVD_LASTPRIVATE)
9451 code = OMP_CLAUSE_LASTPRIVATE;
9452 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9453 return 0;
9454 else
9455 gcc_unreachable ();
9457 if (((flags & GOVD_LASTPRIVATE)
9458 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9459 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9460 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9462 tree chain = *list_p;
9463 clause = build_omp_clause (input_location, code);
9464 OMP_CLAUSE_DECL (clause) = decl;
9465 OMP_CLAUSE_CHAIN (clause) = chain;
9466 if (private_debug)
9467 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9468 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9469 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9470 else if (code == OMP_CLAUSE_SHARED
9471 && (flags & GOVD_WRITTEN) == 0
9472 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9473 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9474 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9475 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9476 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9478 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9479 OMP_CLAUSE_DECL (nc) = decl;
9480 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9481 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9482 OMP_CLAUSE_DECL (clause)
9483 = build_simple_mem_ref_loc (input_location, decl);
9484 OMP_CLAUSE_DECL (clause)
9485 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9486 build_int_cst (build_pointer_type (char_type_node), 0));
9487 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9488 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9489 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9490 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9491 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9492 OMP_CLAUSE_CHAIN (nc) = chain;
9493 OMP_CLAUSE_CHAIN (clause) = nc;
9494 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9495 gimplify_omp_ctxp = ctx->outer_context;
9496 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9497 pre_p, NULL, is_gimple_val, fb_rvalue);
9498 gimplify_omp_ctxp = ctx;
9500 else if (code == OMP_CLAUSE_MAP)
9502 int kind;
9503 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9504 switch (flags & (GOVD_MAP_TO_ONLY
9505 | GOVD_MAP_FORCE
9506 | GOVD_MAP_FORCE_PRESENT
9507 | GOVD_MAP_ALLOC_ONLY
9508 | GOVD_MAP_FROM_ONLY))
9510 case 0:
9511 kind = GOMP_MAP_TOFROM;
9512 break;
9513 case GOVD_MAP_FORCE:
9514 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
9515 break;
9516 case GOVD_MAP_TO_ONLY:
9517 kind = GOMP_MAP_TO;
9518 break;
9519 case GOVD_MAP_FROM_ONLY:
9520 kind = GOMP_MAP_FROM;
9521 break;
9522 case GOVD_MAP_ALLOC_ONLY:
9523 kind = GOMP_MAP_ALLOC;
9524 break;
9525 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
9526 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
9527 break;
9528 case GOVD_MAP_FORCE_PRESENT:
9529 kind = GOMP_MAP_FORCE_PRESENT;
9530 break;
9531 default:
9532 gcc_unreachable ();
9534 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
9535 if (DECL_SIZE (decl)
9536 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9538 tree decl2 = DECL_VALUE_EXPR (decl);
9539 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9540 decl2 = TREE_OPERAND (decl2, 0);
9541 gcc_assert (DECL_P (decl2));
9542 tree mem = build_simple_mem_ref (decl2);
9543 OMP_CLAUSE_DECL (clause) = mem;
9544 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9545 if (gimplify_omp_ctxp->outer_context)
9547 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9548 omp_notice_variable (ctx, decl2, true);
9549 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
9551 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9552 OMP_CLAUSE_MAP);
9553 OMP_CLAUSE_DECL (nc) = decl;
9554 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9555 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
9556 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9557 else
9558 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9559 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9560 OMP_CLAUSE_CHAIN (clause) = nc;
9562 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9563 && lang_hooks.decls.omp_privatize_by_reference (decl))
9565 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
9566 OMP_CLAUSE_SIZE (clause)
9567 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
9568 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9569 gimplify_omp_ctxp = ctx->outer_context;
9570 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
9571 pre_p, NULL, is_gimple_val, fb_rvalue);
9572 gimplify_omp_ctxp = ctx;
9573 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9574 OMP_CLAUSE_MAP);
9575 OMP_CLAUSE_DECL (nc) = decl;
9576 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9577 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
9578 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9579 OMP_CLAUSE_CHAIN (clause) = nc;
9581 else
9582 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
9584 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
9586 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
9587 OMP_CLAUSE_DECL (nc) = decl;
9588 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
9589 OMP_CLAUSE_CHAIN (nc) = chain;
9590 OMP_CLAUSE_CHAIN (clause) = nc;
9591 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9592 gimplify_omp_ctxp = ctx->outer_context;
9593 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9594 gimplify_omp_ctxp = ctx;
9596 *list_p = clause;
9597 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9598 gimplify_omp_ctxp = ctx->outer_context;
9599 lang_hooks.decls.omp_finish_clause (clause, pre_p);
9600 if (gimplify_omp_ctxp)
9601 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
9602 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
9603 && DECL_P (OMP_CLAUSE_SIZE (clause)))
9604 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
9605 true);
9606 gimplify_omp_ctxp = ctx;
9607 return 0;
9610 static void
9611 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
9612 enum tree_code code)
9614 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9615 tree c, decl;
9617 if (body)
9619 struct gimplify_omp_ctx *octx;
9620 for (octx = ctx; octx; octx = octx->outer_context)
9621 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9622 break;
9623 if (octx)
9625 struct walk_stmt_info wi;
9626 memset (&wi, 0, sizeof (wi));
9627 walk_gimple_seq (body, omp_find_stores_stmt,
9628 omp_find_stores_op, &wi);
9631 while ((c = *list_p) != NULL)
9633 splay_tree_node n;
9634 bool remove = false;
9636 switch (OMP_CLAUSE_CODE (c))
9638 case OMP_CLAUSE_FIRSTPRIVATE:
9639 if ((ctx->region_type & ORT_TARGET)
9640 && (ctx->region_type & ORT_ACC) == 0
9641 && TYPE_ATOMIC (strip_array_types
9642 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9644 error_at (OMP_CLAUSE_LOCATION (c),
9645 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9646 "%<target%> construct", OMP_CLAUSE_DECL (c));
9647 remove = true;
9648 break;
9650 /* FALLTHRU */
9651 case OMP_CLAUSE_PRIVATE:
9652 case OMP_CLAUSE_SHARED:
9653 case OMP_CLAUSE_LINEAR:
9654 decl = OMP_CLAUSE_DECL (c);
9655 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9656 remove = !(n->value & GOVD_SEEN);
9657 if (! remove)
9659 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
9660 if ((n->value & GOVD_DEBUG_PRIVATE)
9661 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9663 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9664 || ((n->value & GOVD_DATA_SHARE_CLASS)
9665 == GOVD_SHARED));
9666 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
9667 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
9669 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9670 && (n->value & GOVD_WRITTEN) == 0
9671 && DECL_P (decl)
9672 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9673 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9674 else if (DECL_P (decl)
9675 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9676 && (n->value & GOVD_WRITTEN) != 0)
9677 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9678 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9679 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9680 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9682 break;
9684 case OMP_CLAUSE_LASTPRIVATE:
9685 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9686 accurately reflect the presence of a FIRSTPRIVATE clause. */
9687 decl = OMP_CLAUSE_DECL (c);
9688 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9689 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9690 = (n->value & GOVD_FIRSTPRIVATE) != 0;
9691 if (code == OMP_DISTRIBUTE
9692 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9694 remove = true;
9695 error_at (OMP_CLAUSE_LOCATION (c),
9696 "same variable used in %<firstprivate%> and "
9697 "%<lastprivate%> clauses on %<distribute%> "
9698 "construct");
9700 if (!remove
9701 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9702 && DECL_P (decl)
9703 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9704 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9705 break;
9707 case OMP_CLAUSE_ALIGNED:
9708 decl = OMP_CLAUSE_DECL (c);
9709 if (!is_global_var (decl))
9711 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9712 remove = n == NULL || !(n->value & GOVD_SEEN);
9713 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9715 struct gimplify_omp_ctx *octx;
9716 if (n != NULL
9717 && (n->value & (GOVD_DATA_SHARE_CLASS
9718 & ~GOVD_FIRSTPRIVATE)))
9719 remove = true;
9720 else
9721 for (octx = ctx->outer_context; octx;
9722 octx = octx->outer_context)
9724 n = splay_tree_lookup (octx->variables,
9725 (splay_tree_key) decl);
9726 if (n == NULL)
9727 continue;
9728 if (n->value & GOVD_LOCAL)
9729 break;
9730 /* We have to avoid assigning a shared variable
9731 to itself when trying to add
9732 __builtin_assume_aligned. */
9733 if (n->value & GOVD_SHARED)
9735 remove = true;
9736 break;
9741 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9743 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9744 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9745 remove = true;
9747 break;
9749 case OMP_CLAUSE_NONTEMPORAL:
9750 decl = OMP_CLAUSE_DECL (c);
9751 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9752 remove = n == NULL || !(n->value & GOVD_SEEN);
9753 break;
9755 case OMP_CLAUSE_MAP:
9756 if (code == OMP_TARGET_EXIT_DATA
9757 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9759 remove = true;
9760 break;
9762 decl = OMP_CLAUSE_DECL (c);
9763 /* Data clauses associated with acc parallel reductions must be
9764 compatible with present_or_copy. Warn and adjust the clause
9765 if that is not the case. */
9766 if (ctx->region_type == ORT_ACC_PARALLEL)
9768 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9769 n = NULL;
9771 if (DECL_P (t))
9772 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9774 if (n && (n->value & GOVD_REDUCTION))
9776 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9778 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9779 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9780 && kind != GOMP_MAP_FORCE_PRESENT
9781 && kind != GOMP_MAP_POINTER)
9783 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9784 "incompatible data clause with reduction "
9785 "on %qE; promoting to present_or_copy",
9786 DECL_NAME (t));
9787 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9791 if (!DECL_P (decl))
9793 if ((ctx->region_type & ORT_TARGET) != 0
9794 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9796 if (TREE_CODE (decl) == INDIRECT_REF
9797 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9798 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9799 == REFERENCE_TYPE))
9800 decl = TREE_OPERAND (decl, 0);
9801 if (TREE_CODE (decl) == COMPONENT_REF)
9803 while (TREE_CODE (decl) == COMPONENT_REF)
9804 decl = TREE_OPERAND (decl, 0);
9805 if (DECL_P (decl))
9807 n = splay_tree_lookup (ctx->variables,
9808 (splay_tree_key) decl);
9809 if (!(n->value & GOVD_SEEN))
9810 remove = true;
9814 break;
9816 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9817 if ((ctx->region_type & ORT_TARGET) != 0
9818 && !(n->value & GOVD_SEEN)
9819 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9820 && (!is_global_var (decl)
9821 || !lookup_attribute ("omp declare target link",
9822 DECL_ATTRIBUTES (decl))))
9824 remove = true;
9825 /* For struct element mapping, if struct is never referenced
9826 in target block and none of the mapping has always modifier,
9827 remove all the struct element mappings, which immediately
9828 follow the GOMP_MAP_STRUCT map clause. */
9829 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9831 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9832 while (cnt--)
9833 OMP_CLAUSE_CHAIN (c)
9834 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9837 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9838 && code == OMP_TARGET_EXIT_DATA)
9839 remove = true;
9840 else if (DECL_SIZE (decl)
9841 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9842 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9843 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9844 && (OMP_CLAUSE_MAP_KIND (c)
9845 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9847 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9848 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9849 INTEGER_CST. */
9850 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9852 tree decl2 = DECL_VALUE_EXPR (decl);
9853 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9854 decl2 = TREE_OPERAND (decl2, 0);
9855 gcc_assert (DECL_P (decl2));
9856 tree mem = build_simple_mem_ref (decl2);
9857 OMP_CLAUSE_DECL (c) = mem;
9858 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9859 if (ctx->outer_context)
9861 omp_notice_variable (ctx->outer_context, decl2, true);
9862 omp_notice_variable (ctx->outer_context,
9863 OMP_CLAUSE_SIZE (c), true);
9865 if (((ctx->region_type & ORT_TARGET) != 0
9866 || !ctx->target_firstprivatize_array_bases)
9867 && ((n->value & GOVD_SEEN) == 0
9868 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9870 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9871 OMP_CLAUSE_MAP);
9872 OMP_CLAUSE_DECL (nc) = decl;
9873 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9874 if (ctx->target_firstprivatize_array_bases)
9875 OMP_CLAUSE_SET_MAP_KIND (nc,
9876 GOMP_MAP_FIRSTPRIVATE_POINTER);
9877 else
9878 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9879 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9880 OMP_CLAUSE_CHAIN (c) = nc;
9881 c = nc;
9884 else
9886 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9887 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9888 gcc_assert ((n->value & GOVD_SEEN) == 0
9889 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9890 == 0));
9892 break;
9894 case OMP_CLAUSE_TO:
9895 case OMP_CLAUSE_FROM:
9896 case OMP_CLAUSE__CACHE_:
9897 decl = OMP_CLAUSE_DECL (c);
9898 if (!DECL_P (decl))
9899 break;
9900 if (DECL_SIZE (decl)
9901 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9903 tree decl2 = DECL_VALUE_EXPR (decl);
9904 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9905 decl2 = TREE_OPERAND (decl2, 0);
9906 gcc_assert (DECL_P (decl2));
9907 tree mem = build_simple_mem_ref (decl2);
9908 OMP_CLAUSE_DECL (c) = mem;
9909 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9910 if (ctx->outer_context)
9912 omp_notice_variable (ctx->outer_context, decl2, true);
9913 omp_notice_variable (ctx->outer_context,
9914 OMP_CLAUSE_SIZE (c), true);
9917 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9918 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9919 break;
9921 case OMP_CLAUSE_REDUCTION:
9922 case OMP_CLAUSE_IN_REDUCTION:
9923 case OMP_CLAUSE_TASK_REDUCTION:
9924 decl = OMP_CLAUSE_DECL (c);
9925 /* OpenACC reductions need a present_or_copy data clause.
9926 Add one if necessary. Emit error when the reduction is private. */
9927 if (ctx->region_type == ORT_ACC_PARALLEL)
9929 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9930 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9932 remove = true;
9933 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9934 "reduction on %qE", DECL_NAME (decl));
9936 else if ((n->value & GOVD_MAP) == 0)
9938 tree next = OMP_CLAUSE_CHAIN (c);
9939 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9940 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9941 OMP_CLAUSE_DECL (nc) = decl;
9942 OMP_CLAUSE_CHAIN (c) = nc;
9943 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9944 while (1)
9946 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9947 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9948 break;
9949 nc = OMP_CLAUSE_CHAIN (nc);
9951 OMP_CLAUSE_CHAIN (nc) = next;
9952 n->value |= GOVD_MAP;
9955 if (DECL_P (decl)
9956 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9957 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9958 break;
9959 case OMP_CLAUSE_COPYIN:
9960 case OMP_CLAUSE_COPYPRIVATE:
9961 case OMP_CLAUSE_IF:
9962 case OMP_CLAUSE_NUM_THREADS:
9963 case OMP_CLAUSE_NUM_TEAMS:
9964 case OMP_CLAUSE_THREAD_LIMIT:
9965 case OMP_CLAUSE_DIST_SCHEDULE:
9966 case OMP_CLAUSE_DEVICE:
9967 case OMP_CLAUSE_SCHEDULE:
9968 case OMP_CLAUSE_NOWAIT:
9969 case OMP_CLAUSE_ORDERED:
9970 case OMP_CLAUSE_DEFAULT:
9971 case OMP_CLAUSE_UNTIED:
9972 case OMP_CLAUSE_COLLAPSE:
9973 case OMP_CLAUSE_FINAL:
9974 case OMP_CLAUSE_MERGEABLE:
9975 case OMP_CLAUSE_PROC_BIND:
9976 case OMP_CLAUSE_SAFELEN:
9977 case OMP_CLAUSE_SIMDLEN:
9978 case OMP_CLAUSE_DEPEND:
9979 case OMP_CLAUSE_PRIORITY:
9980 case OMP_CLAUSE_GRAINSIZE:
9981 case OMP_CLAUSE_NUM_TASKS:
9982 case OMP_CLAUSE_NOGROUP:
9983 case OMP_CLAUSE_THREADS:
9984 case OMP_CLAUSE_SIMD:
9985 case OMP_CLAUSE_HINT:
9986 case OMP_CLAUSE_DEFAULTMAP:
9987 case OMP_CLAUSE_USE_DEVICE_PTR:
9988 case OMP_CLAUSE_IS_DEVICE_PTR:
9989 case OMP_CLAUSE_ASYNC:
9990 case OMP_CLAUSE_WAIT:
9991 case OMP_CLAUSE_INDEPENDENT:
9992 case OMP_CLAUSE_NUM_GANGS:
9993 case OMP_CLAUSE_NUM_WORKERS:
9994 case OMP_CLAUSE_VECTOR_LENGTH:
9995 case OMP_CLAUSE_GANG:
9996 case OMP_CLAUSE_WORKER:
9997 case OMP_CLAUSE_VECTOR:
9998 case OMP_CLAUSE_AUTO:
9999 case OMP_CLAUSE_SEQ:
10000 case OMP_CLAUSE_TILE:
10001 case OMP_CLAUSE_IF_PRESENT:
10002 case OMP_CLAUSE_FINALIZE:
10003 break;
10005 default:
10006 gcc_unreachable ();
10009 if (remove)
10010 *list_p = OMP_CLAUSE_CHAIN (c);
10011 else
10012 list_p = &OMP_CLAUSE_CHAIN (c);
10015 /* Add in any implicit data sharing. */
10016 struct gimplify_adjust_omp_clauses_data data;
10017 data.list_p = list_p;
10018 data.pre_p = pre_p;
10019 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10021 gimplify_omp_ctxp = ctx->outer_context;
10022 delete_omp_context (ctx);
10025 /* Gimplify OACC_CACHE. */
10027 static void
10028 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10030 tree expr = *expr_p;
10032 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10033 OACC_CACHE);
10034 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10035 OACC_CACHE);
10037 /* TODO: Do something sensible with this information. */
10039 *expr_p = NULL_TREE;
10042 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10043 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10044 kind. The entry kind will replace the one in CLAUSE, while the exit
10045 kind will be used in a new omp_clause and returned to the caller. */
10047 static tree
10048 gimplify_oacc_declare_1 (tree clause)
10050 HOST_WIDE_INT kind, new_op;
10051 bool ret = false;
10052 tree c = NULL;
10054 kind = OMP_CLAUSE_MAP_KIND (clause);
10056 switch (kind)
10058 case GOMP_MAP_ALLOC:
10059 new_op = GOMP_MAP_RELEASE;
10060 ret = true;
10061 break;
10063 case GOMP_MAP_FROM:
10064 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10065 new_op = GOMP_MAP_FROM;
10066 ret = true;
10067 break;
10069 case GOMP_MAP_TOFROM:
10070 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10071 new_op = GOMP_MAP_FROM;
10072 ret = true;
10073 break;
10075 case GOMP_MAP_DEVICE_RESIDENT:
10076 case GOMP_MAP_FORCE_DEVICEPTR:
10077 case GOMP_MAP_FORCE_PRESENT:
10078 case GOMP_MAP_LINK:
10079 case GOMP_MAP_POINTER:
10080 case GOMP_MAP_TO:
10081 break;
10083 default:
10084 gcc_unreachable ();
10085 break;
10088 if (ret)
10090 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10091 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10092 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10095 return c;
10098 /* Gimplify OACC_DECLARE. */
10100 static void
10101 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10103 tree expr = *expr_p;
10104 gomp_target *stmt;
10105 tree clauses, t, decl;
10107 clauses = OACC_DECLARE_CLAUSES (expr);
10109 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10110 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10112 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10114 decl = OMP_CLAUSE_DECL (t);
10116 if (TREE_CODE (decl) == MEM_REF)
10117 decl = TREE_OPERAND (decl, 0);
10119 if (VAR_P (decl) && !is_oacc_declared (decl))
10121 tree attr = get_identifier ("oacc declare target");
10122 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10123 DECL_ATTRIBUTES (decl));
10126 if (VAR_P (decl)
10127 && !is_global_var (decl)
10128 && DECL_CONTEXT (decl) == current_function_decl)
10130 tree c = gimplify_oacc_declare_1 (t);
10131 if (c)
10133 if (oacc_declare_returns == NULL)
10134 oacc_declare_returns = new hash_map<tree, tree>;
10136 oacc_declare_returns->put (decl, c);
10140 if (gimplify_omp_ctxp)
10141 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10144 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10145 clauses);
10147 gimplify_seq_add_stmt (pre_p, stmt);
10149 *expr_p = NULL_TREE;
10152 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10153 gimplification of the body, as well as scanning the body for used
10154 variables. We need to do this scan now, because variable-sized
10155 decls will be decomposed during gimplification. */
10157 static void
10158 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10160 tree expr = *expr_p;
10161 gimple *g;
10162 gimple_seq body = NULL;
10164 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10165 OMP_PARALLEL_COMBINED (expr)
10166 ? ORT_COMBINED_PARALLEL
10167 : ORT_PARALLEL, OMP_PARALLEL);
10169 push_gimplify_context ();
10171 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10172 if (gimple_code (g) == GIMPLE_BIND)
10173 pop_gimplify_context (g);
10174 else
10175 pop_gimplify_context (NULL);
10177 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10178 OMP_PARALLEL);
10180 g = gimple_build_omp_parallel (body,
10181 OMP_PARALLEL_CLAUSES (expr),
10182 NULL_TREE, NULL_TREE);
10183 if (OMP_PARALLEL_COMBINED (expr))
10184 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10185 gimplify_seq_add_stmt (pre_p, g);
10186 *expr_p = NULL_TREE;
10189 /* Gimplify the contents of an OMP_TASK statement. This involves
10190 gimplification of the body, as well as scanning the body for used
10191 variables. We need to do this scan now, because variable-sized
10192 decls will be decomposed during gimplification. */
10194 static void
10195 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10197 tree expr = *expr_p;
10198 gimple *g;
10199 gimple_seq body = NULL;
10201 if (OMP_TASK_BODY (expr) == NULL_TREE)
10202 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10203 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10204 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10206 error_at (OMP_CLAUSE_LOCATION (c),
10207 "%<mutexinoutset%> kind in %<depend%> clause on a "
10208 "%<taskwait%> construct");
10209 break;
10212 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10213 omp_find_clause (OMP_TASK_CLAUSES (expr),
10214 OMP_CLAUSE_UNTIED)
10215 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10217 if (OMP_TASK_BODY (expr))
10219 push_gimplify_context ();
10221 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10222 if (gimple_code (g) == GIMPLE_BIND)
10223 pop_gimplify_context (g);
10224 else
10225 pop_gimplify_context (NULL);
10228 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10229 OMP_TASK);
10231 g = gimple_build_omp_task (body,
10232 OMP_TASK_CLAUSES (expr),
10233 NULL_TREE, NULL_TREE,
10234 NULL_TREE, NULL_TREE, NULL_TREE);
10235 if (OMP_TASK_BODY (expr) == NULL_TREE)
10236 gimple_omp_task_set_taskwait_p (g, true);
10237 gimplify_seq_add_stmt (pre_p, g);
10238 *expr_p = NULL_TREE;
10241 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
10242 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10243 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10244 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10245 OMP_FOR in between if any and pdata[3] is address of the inner
10246 OMP_FOR/OMP_SIMD. */
10248 static tree
10249 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10251 tree **pdata = (tree **) data;
10252 *walk_subtrees = 0;
10253 switch (TREE_CODE (*tp))
10255 case OMP_FOR:
10256 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10258 pdata[3] = tp;
10259 return *tp;
10261 pdata[2] = tp;
10262 *walk_subtrees = 1;
10263 break;
10264 case OMP_SIMD:
10265 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10267 pdata[3] = tp;
10268 return *tp;
10270 break;
10271 case BIND_EXPR:
10272 if (BIND_EXPR_VARS (*tp)
10273 || (BIND_EXPR_BLOCK (*tp)
10274 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10275 pdata[0] = tp;
10276 *walk_subtrees = 1;
10277 break;
10278 case STATEMENT_LIST:
10279 if (!tsi_one_before_end_p (tsi_start (*tp)))
10280 pdata[0] = tp;
10281 *walk_subtrees = 1;
10282 break;
10283 case TRY_FINALLY_EXPR:
10284 pdata[0] = tp;
10285 *walk_subtrees = 1;
10286 break;
10287 case OMP_PARALLEL:
10288 pdata[1] = tp;
10289 *walk_subtrees = 1;
10290 break;
10291 default:
10292 break;
10294 return NULL_TREE;
10297 /* Gimplify the gross structure of an OMP_FOR statement. */
10299 static enum gimplify_status
10300 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10302 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10303 enum gimplify_status ret = GS_ALL_DONE;
10304 enum gimplify_status tret;
10305 gomp_for *gfor;
10306 gimple_seq for_body, for_pre_body;
10307 int i;
10308 bitmap has_decl_expr = NULL;
10309 enum omp_region_type ort = ORT_WORKSHARE;
10311 orig_for_stmt = for_stmt = *expr_p;
10313 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10315 tree *data[4] = { NULL, NULL, NULL, NULL };
10316 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10317 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10318 find_combined_omp_for, data, NULL);
10319 if (inner_for_stmt == NULL_TREE)
10321 gcc_assert (seen_error ());
10322 *expr_p = NULL_TREE;
10323 return GS_ERROR;
10325 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10327 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10328 &OMP_FOR_PRE_BODY (for_stmt));
10329 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10331 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10333 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10334 &OMP_FOR_PRE_BODY (for_stmt));
10335 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10338 if (data[0])
10340 /* We have some statements or variable declarations in between
10341 the composite construct directives. Move them around the
10342 inner_for_stmt. */
10343 data[0] = expr_p;
10344 for (i = 0; i < 3; i++)
10345 if (data[i])
10347 tree t = *data[i];
10348 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10349 data[i + 1] = data[i];
10350 *data[i] = OMP_BODY (t);
10351 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10352 NULL_TREE, make_node (BLOCK));
10353 OMP_BODY (t) = body;
10354 append_to_statement_list_force (inner_for_stmt,
10355 &BIND_EXPR_BODY (body));
10356 *data[3] = t;
10357 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10358 gcc_assert (*data[3] == inner_for_stmt);
10360 return GS_OK;
10363 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10364 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10365 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10366 i)) == TREE_LIST
10367 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10368 i)))
10370 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10371 /* Class iterators aren't allowed on OMP_SIMD, so the only
10372 case we need to solve is distribute parallel for. */
10373 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10374 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10375 && data[1]);
10376 tree orig_decl = TREE_PURPOSE (orig);
10377 tree last = TREE_VALUE (orig);
10378 tree *pc;
10379 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10380 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10381 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10382 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10383 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10384 break;
10385 if (*pc == NULL_TREE)
10387 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10389 /* private clause will appear only on inner_for_stmt.
10390 Change it into firstprivate, and add private clause
10391 on for_stmt. */
10392 tree c = copy_node (*pc);
10393 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10394 OMP_FOR_CLAUSES (for_stmt) = c;
10395 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10396 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10398 else
10400 /* lastprivate clause will appear on both inner_for_stmt
10401 and for_stmt. Add firstprivate clause to
10402 inner_for_stmt. */
10403 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10404 OMP_CLAUSE_FIRSTPRIVATE);
10405 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10406 OMP_CLAUSE_CHAIN (c) = *pc;
10407 *pc = c;
10408 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10410 tree c = build_omp_clause (UNKNOWN_LOCATION,
10411 OMP_CLAUSE_FIRSTPRIVATE);
10412 OMP_CLAUSE_DECL (c) = last;
10413 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10414 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10415 c = build_omp_clause (UNKNOWN_LOCATION,
10416 *pc ? OMP_CLAUSE_SHARED
10417 : OMP_CLAUSE_FIRSTPRIVATE);
10418 OMP_CLAUSE_DECL (c) = orig_decl;
10419 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10420 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10422 /* Similarly, take care of C++ range for temporaries, those should
10423 be firstprivate on OMP_PARALLEL if any. */
10424 if (data[1])
10425 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10426 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10427 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10428 i)) == TREE_LIST
10429 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10430 i)))
10432 tree orig
10433 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10434 tree v = TREE_CHAIN (orig);
10435 tree c = build_omp_clause (UNKNOWN_LOCATION,
10436 OMP_CLAUSE_FIRSTPRIVATE);
10437 /* First add firstprivate clause for the __for_end artificial
10438 decl. */
10439 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10440 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10441 == REFERENCE_TYPE)
10442 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10443 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10444 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10445 if (TREE_VEC_ELT (v, 0))
10447 /* And now the same for __for_range artificial decl if it
10448 exists. */
10449 c = build_omp_clause (UNKNOWN_LOCATION,
10450 OMP_CLAUSE_FIRSTPRIVATE);
10451 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10452 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10453 == REFERENCE_TYPE)
10454 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10455 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10456 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10461 switch (TREE_CODE (for_stmt))
10463 case OMP_FOR:
10464 case OMP_DISTRIBUTE:
10465 break;
10466 case OACC_LOOP:
10467 ort = ORT_ACC;
10468 break;
10469 case OMP_TASKLOOP:
10470 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
10471 ort = ORT_UNTIED_TASKLOOP;
10472 else
10473 ort = ORT_TASKLOOP;
10474 break;
10475 case OMP_SIMD:
10476 ort = ORT_SIMD;
10477 break;
10478 default:
10479 gcc_unreachable ();
10482 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10483 clause for the IV. */
10484 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10486 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10487 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10488 decl = TREE_OPERAND (t, 0);
10489 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10490 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10491 && OMP_CLAUSE_DECL (c) == decl)
10493 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10494 break;
10498 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10499 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10500 TREE_CODE (for_stmt));
10502 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10503 gimplify_omp_ctxp->distribute = true;
10505 /* Handle OMP_FOR_INIT. */
10506 for_pre_body = NULL;
10507 if ((ort == ORT_SIMD
10508 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10509 && OMP_FOR_PRE_BODY (for_stmt))
10511 has_decl_expr = BITMAP_ALLOC (NULL);
10512 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10513 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
10514 == VAR_DECL)
10516 t = OMP_FOR_PRE_BODY (for_stmt);
10517 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10519 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10521 tree_stmt_iterator si;
10522 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10523 tsi_next (&si))
10525 t = tsi_stmt (si);
10526 if (TREE_CODE (t) == DECL_EXPR
10527 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10528 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10532 if (OMP_FOR_PRE_BODY (for_stmt))
10534 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10535 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10536 else
10538 struct gimplify_omp_ctx ctx;
10539 memset (&ctx, 0, sizeof (ctx));
10540 ctx.region_type = ORT_NONE;
10541 gimplify_omp_ctxp = &ctx;
10542 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10543 gimplify_omp_ctxp = NULL;
10546 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
10548 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10549 for_stmt = inner_for_stmt;
10551 /* For taskloop, need to gimplify the start, end and step before the
10552 taskloop, outside of the taskloop omp context. */
10553 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10555 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10557 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10558 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10560 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10561 TREE_OPERAND (t, 1)
10562 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10563 gimple_seq_empty_p (for_pre_body)
10564 ? pre_p : &for_pre_body, NULL,
10565 false);
10566 /* Reference to pointer conversion is considered useless,
10567 but is significant for firstprivate clause. Force it
10568 here. */
10569 if (TREE_CODE (type) == POINTER_TYPE
10570 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10571 == REFERENCE_TYPE))
10573 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10574 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10575 TREE_OPERAND (t, 1));
10576 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10577 ? pre_p : &for_pre_body);
10578 TREE_OPERAND (t, 1) = v;
10580 tree c = build_omp_clause (input_location,
10581 OMP_CLAUSE_FIRSTPRIVATE);
10582 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10583 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10584 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10587 /* Handle OMP_FOR_COND. */
10588 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10589 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10591 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10592 TREE_OPERAND (t, 1)
10593 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10594 gimple_seq_empty_p (for_pre_body)
10595 ? pre_p : &for_pre_body, NULL,
10596 false);
10597 /* Reference to pointer conversion is considered useless,
10598 but is significant for firstprivate clause. Force it
10599 here. */
10600 if (TREE_CODE (type) == POINTER_TYPE
10601 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10602 == REFERENCE_TYPE))
10604 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10605 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10606 TREE_OPERAND (t, 1));
10607 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10608 ? pre_p : &for_pre_body);
10609 TREE_OPERAND (t, 1) = v;
10611 tree c = build_omp_clause (input_location,
10612 OMP_CLAUSE_FIRSTPRIVATE);
10613 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10614 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10615 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10618 /* Handle OMP_FOR_INCR. */
10619 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10620 if (TREE_CODE (t) == MODIFY_EXPR)
10622 decl = TREE_OPERAND (t, 0);
10623 t = TREE_OPERAND (t, 1);
10624 tree *tp = &TREE_OPERAND (t, 1);
10625 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10626 tp = &TREE_OPERAND (t, 0);
10628 if (!is_gimple_constant (*tp))
10630 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10631 ? pre_p : &for_pre_body;
10632 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
10633 tree c = build_omp_clause (input_location,
10634 OMP_CLAUSE_FIRSTPRIVATE);
10635 OMP_CLAUSE_DECL (c) = *tp;
10636 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10637 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10642 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10643 OMP_TASKLOOP);
10646 if (orig_for_stmt != for_stmt)
10647 gimplify_omp_ctxp->combined_loop = true;
10649 for_body = NULL;
10650 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10651 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10652 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10653 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
10655 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
10656 bool is_doacross = false;
10657 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10659 is_doacross = true;
10660 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10661 (OMP_FOR_INIT (for_stmt))
10662 * 2);
10664 int collapse = 1, tile = 0;
10665 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
10666 if (c)
10667 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10668 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10669 if (c)
10670 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
10671 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10673 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10674 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10675 decl = TREE_OPERAND (t, 0);
10676 gcc_assert (DECL_P (decl));
10677 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10678 || POINTER_TYPE_P (TREE_TYPE (decl)));
10679 if (is_doacross)
10681 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
10683 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
10684 if (TREE_CODE (orig_decl) == TREE_LIST)
10686 orig_decl = TREE_PURPOSE (orig_decl);
10687 if (!orig_decl)
10688 orig_decl = decl;
10690 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
10692 else
10693 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10694 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10697 /* Make sure the iteration variable is private. */
10698 tree c = NULL_TREE;
10699 tree c2 = NULL_TREE;
10700 if (orig_for_stmt != for_stmt)
10702 /* Preserve this information until we gimplify the inner simd. */
10703 if (has_decl_expr
10704 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10705 TREE_PRIVATE (t) = 1;
10707 else if (ort == ORT_SIMD)
10709 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10710 (splay_tree_key) decl);
10711 omp_is_private (gimplify_omp_ctxp, decl,
10712 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10713 != 1));
10714 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10715 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10716 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10718 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10719 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10720 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
10721 if ((has_decl_expr
10722 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10723 || TREE_PRIVATE (t))
10725 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10726 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10728 struct gimplify_omp_ctx *outer
10729 = gimplify_omp_ctxp->outer_context;
10730 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10732 if (outer->region_type == ORT_WORKSHARE
10733 && outer->combined_loop)
10735 n = splay_tree_lookup (outer->variables,
10736 (splay_tree_key)decl);
10737 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10739 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10740 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10742 else
10744 struct gimplify_omp_ctx *octx = outer->outer_context;
10745 if (octx
10746 && octx->region_type == ORT_COMBINED_PARALLEL
10747 && octx->outer_context
10748 && (octx->outer_context->region_type
10749 == ORT_WORKSHARE)
10750 && octx->outer_context->combined_loop)
10752 octx = octx->outer_context;
10753 n = splay_tree_lookup (octx->variables,
10754 (splay_tree_key)decl);
10755 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10757 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10758 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10765 OMP_CLAUSE_DECL (c) = decl;
10766 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10767 OMP_FOR_CLAUSES (for_stmt) = c;
10768 omp_add_variable (gimplify_omp_ctxp, decl, flags);
10769 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10771 if (outer->region_type == ORT_WORKSHARE
10772 && outer->combined_loop)
10774 if (outer->outer_context
10775 && (outer->outer_context->region_type
10776 == ORT_COMBINED_PARALLEL))
10777 outer = outer->outer_context;
10778 else if (omp_check_private (outer, decl, false))
10779 outer = NULL;
10781 else if (((outer->region_type & ORT_TASKLOOP)
10782 == ORT_TASKLOOP)
10783 && outer->combined_loop
10784 && !omp_check_private (gimplify_omp_ctxp,
10785 decl, false))
10787 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10789 omp_notice_variable (outer, decl, true);
10790 outer = NULL;
10792 if (outer)
10794 n = splay_tree_lookup (outer->variables,
10795 (splay_tree_key)decl);
10796 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10798 omp_add_variable (outer, decl,
10799 GOVD_LASTPRIVATE | GOVD_SEEN);
10800 if (outer->region_type == ORT_COMBINED_PARALLEL
10801 && outer->outer_context
10802 && (outer->outer_context->region_type
10803 == ORT_WORKSHARE)
10804 && outer->outer_context->combined_loop)
10806 outer = outer->outer_context;
10807 n = splay_tree_lookup (outer->variables,
10808 (splay_tree_key)decl);
10809 if (omp_check_private (outer, decl, false))
10810 outer = NULL;
10811 else if (n == NULL
10812 || ((n->value & GOVD_DATA_SHARE_CLASS)
10813 == 0))
10814 omp_add_variable (outer, decl,
10815 GOVD_LASTPRIVATE
10816 | GOVD_SEEN);
10817 else
10818 outer = NULL;
10820 if (outer && outer->outer_context
10821 && ((outer->outer_context->region_type
10822 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10823 || (((outer->region_type & ORT_TASKLOOP)
10824 == ORT_TASKLOOP)
10825 && (outer->outer_context->region_type
10826 == ORT_COMBINED_PARALLEL))))
10828 outer = outer->outer_context;
10829 n = splay_tree_lookup (outer->variables,
10830 (splay_tree_key)decl);
10831 if (n == NULL
10832 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10833 omp_add_variable (outer, decl,
10834 GOVD_SHARED | GOVD_SEEN);
10835 else
10836 outer = NULL;
10838 if (outer && outer->outer_context)
10839 omp_notice_variable (outer->outer_context, decl,
10840 true);
10845 else
10847 bool lastprivate
10848 = (!has_decl_expr
10849 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
10850 if (TREE_PRIVATE (t))
10851 lastprivate = false;
10852 struct gimplify_omp_ctx *outer
10853 = gimplify_omp_ctxp->outer_context;
10854 if (outer && lastprivate)
10856 if (outer->region_type == ORT_WORKSHARE
10857 && outer->combined_loop)
10859 n = splay_tree_lookup (outer->variables,
10860 (splay_tree_key)decl);
10861 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10863 lastprivate = false;
10864 outer = NULL;
10866 else if (outer->outer_context
10867 && (outer->outer_context->region_type
10868 == ORT_COMBINED_PARALLEL))
10869 outer = outer->outer_context;
10870 else if (omp_check_private (outer, decl, false))
10871 outer = NULL;
10873 else if (((outer->region_type & ORT_TASKLOOP)
10874 == ORT_TASKLOOP)
10875 && outer->combined_loop
10876 && !omp_check_private (gimplify_omp_ctxp,
10877 decl, false))
10879 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10881 omp_notice_variable (outer, decl, true);
10882 outer = NULL;
10884 if (outer)
10886 n = splay_tree_lookup (outer->variables,
10887 (splay_tree_key)decl);
10888 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10890 omp_add_variable (outer, decl,
10891 GOVD_LASTPRIVATE | GOVD_SEEN);
10892 if (outer->region_type == ORT_COMBINED_PARALLEL
10893 && outer->outer_context
10894 && (outer->outer_context->region_type
10895 == ORT_WORKSHARE)
10896 && outer->outer_context->combined_loop)
10898 outer = outer->outer_context;
10899 n = splay_tree_lookup (outer->variables,
10900 (splay_tree_key)decl);
10901 if (omp_check_private (outer, decl, false))
10902 outer = NULL;
10903 else if (n == NULL
10904 || ((n->value & GOVD_DATA_SHARE_CLASS)
10905 == 0))
10906 omp_add_variable (outer, decl,
10907 GOVD_LASTPRIVATE
10908 | GOVD_SEEN);
10909 else
10910 outer = NULL;
10912 if (outer && outer->outer_context
10913 && ((outer->outer_context->region_type
10914 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10915 || (((outer->region_type & ORT_TASKLOOP)
10916 == ORT_TASKLOOP)
10917 && (outer->outer_context->region_type
10918 == ORT_COMBINED_PARALLEL))))
10920 outer = outer->outer_context;
10921 n = splay_tree_lookup (outer->variables,
10922 (splay_tree_key)decl);
10923 if (n == NULL
10924 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10925 omp_add_variable (outer, decl,
10926 GOVD_SHARED | GOVD_SEEN);
10927 else
10928 outer = NULL;
10930 if (outer && outer->outer_context)
10931 omp_notice_variable (outer->outer_context, decl,
10932 true);
10937 c = build_omp_clause (input_location,
10938 lastprivate ? OMP_CLAUSE_LASTPRIVATE
10939 : OMP_CLAUSE_PRIVATE);
10940 OMP_CLAUSE_DECL (c) = decl;
10941 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10942 OMP_FOR_CLAUSES (for_stmt) = c;
10943 omp_add_variable (gimplify_omp_ctxp, decl,
10944 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
10945 | GOVD_EXPLICIT | GOVD_SEEN);
10946 c = NULL_TREE;
10949 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
10950 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10951 else
10952 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
10954 /* If DECL is not a gimple register, create a temporary variable to act
10955 as an iteration counter. This is valid, since DECL cannot be
10956 modified in the body of the loop. Similarly for any iteration vars
10957 in simd with collapse > 1 where the iterator vars must be
10958 lastprivate. */
10959 if (orig_for_stmt != for_stmt)
10960 var = decl;
10961 else if (!is_gimple_reg (decl)
10962 || (ort == ORT_SIMD
10963 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
10965 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10966 /* Make sure omp_add_variable is not called on it prematurely.
10967 We call it ourselves a few lines later. */
10968 gimplify_omp_ctxp = NULL;
10969 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10970 gimplify_omp_ctxp = ctx;
10971 TREE_OPERAND (t, 0) = var;
10973 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
10975 if (ort == ORT_SIMD
10976 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10978 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10979 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
10980 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
10981 OMP_CLAUSE_DECL (c2) = var;
10982 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
10983 OMP_FOR_CLAUSES (for_stmt) = c2;
10984 omp_add_variable (gimplify_omp_ctxp, var,
10985 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10986 if (c == NULL_TREE)
10988 c = c2;
10989 c2 = NULL_TREE;
10992 else
10993 omp_add_variable (gimplify_omp_ctxp, var,
10994 GOVD_PRIVATE | GOVD_SEEN);
10996 else
10997 var = decl;
10999 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11000 is_gimple_val, fb_rvalue, false);
11001 ret = MIN (ret, tret);
11002 if (ret == GS_ERROR)
11003 return ret;
11005 /* Handle OMP_FOR_COND. */
11006 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11007 gcc_assert (COMPARISON_CLASS_P (t));
11008 gcc_assert (TREE_OPERAND (t, 0) == decl);
11010 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11011 is_gimple_val, fb_rvalue, false);
11012 ret = MIN (ret, tret);
11014 /* Handle OMP_FOR_INCR. */
11015 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11016 switch (TREE_CODE (t))
11018 case PREINCREMENT_EXPR:
11019 case POSTINCREMENT_EXPR:
11021 tree decl = TREE_OPERAND (t, 0);
11022 /* c_omp_for_incr_canonicalize_ptr() should have been
11023 called to massage things appropriately. */
11024 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11026 if (orig_for_stmt != for_stmt)
11027 break;
11028 t = build_int_cst (TREE_TYPE (decl), 1);
11029 if (c)
11030 OMP_CLAUSE_LINEAR_STEP (c) = t;
11031 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11032 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11033 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11034 break;
11037 case PREDECREMENT_EXPR:
11038 case POSTDECREMENT_EXPR:
11039 /* c_omp_for_incr_canonicalize_ptr() should have been
11040 called to massage things appropriately. */
11041 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11042 if (orig_for_stmt != for_stmt)
11043 break;
11044 t = build_int_cst (TREE_TYPE (decl), -1);
11045 if (c)
11046 OMP_CLAUSE_LINEAR_STEP (c) = t;
11047 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11048 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11049 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11050 break;
11052 case MODIFY_EXPR:
11053 gcc_assert (TREE_OPERAND (t, 0) == decl);
11054 TREE_OPERAND (t, 0) = var;
11056 t = TREE_OPERAND (t, 1);
11057 switch (TREE_CODE (t))
11059 case PLUS_EXPR:
11060 if (TREE_OPERAND (t, 1) == decl)
11062 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11063 TREE_OPERAND (t, 0) = var;
11064 break;
11067 /* Fallthru. */
11068 case MINUS_EXPR:
11069 case POINTER_PLUS_EXPR:
11070 gcc_assert (TREE_OPERAND (t, 0) == decl);
11071 TREE_OPERAND (t, 0) = var;
11072 break;
11073 default:
11074 gcc_unreachable ();
11077 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11078 is_gimple_val, fb_rvalue, false);
11079 ret = MIN (ret, tret);
11080 if (c)
11082 tree step = TREE_OPERAND (t, 1);
11083 tree stept = TREE_TYPE (decl);
11084 if (POINTER_TYPE_P (stept))
11085 stept = sizetype;
11086 step = fold_convert (stept, step);
11087 if (TREE_CODE (t) == MINUS_EXPR)
11088 step = fold_build1 (NEGATE_EXPR, stept, step);
11089 OMP_CLAUSE_LINEAR_STEP (c) = step;
11090 if (step != TREE_OPERAND (t, 1))
11092 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11093 &for_pre_body, NULL,
11094 is_gimple_val, fb_rvalue, false);
11095 ret = MIN (ret, tret);
11098 break;
11100 default:
11101 gcc_unreachable ();
11104 if (c2)
11106 gcc_assert (c);
11107 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11110 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11112 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11113 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11114 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11115 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11116 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11117 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11118 && OMP_CLAUSE_DECL (c) == decl)
11120 if (is_doacross && (collapse == 1 || i >= collapse))
11121 t = var;
11122 else
11124 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11125 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11126 gcc_assert (TREE_OPERAND (t, 0) == var);
11127 t = TREE_OPERAND (t, 1);
11128 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11129 || TREE_CODE (t) == MINUS_EXPR
11130 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11131 gcc_assert (TREE_OPERAND (t, 0) == var);
11132 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11133 is_doacross ? var : decl,
11134 TREE_OPERAND (t, 1));
11136 gimple_seq *seq;
11137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11138 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11139 else
11140 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11141 gimplify_assign (decl, t, seq);
11146 BITMAP_FREE (has_decl_expr);
11148 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11150 push_gimplify_context ();
11151 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11153 OMP_FOR_BODY (orig_for_stmt)
11154 = build3 (BIND_EXPR, void_type_node, NULL,
11155 OMP_FOR_BODY (orig_for_stmt), NULL);
11156 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11160 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11161 &for_body);
11163 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11165 if (gimple_code (g) == GIMPLE_BIND)
11166 pop_gimplify_context (g);
11167 else
11168 pop_gimplify_context (NULL);
11171 if (orig_for_stmt != for_stmt)
11172 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11174 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11175 decl = TREE_OPERAND (t, 0);
11176 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11177 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11178 gimplify_omp_ctxp = ctx->outer_context;
11179 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11180 gimplify_omp_ctxp = ctx;
11181 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11182 TREE_OPERAND (t, 0) = var;
11183 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11184 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11185 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11188 gimplify_adjust_omp_clauses (pre_p, for_body,
11189 &OMP_FOR_CLAUSES (orig_for_stmt),
11190 TREE_CODE (orig_for_stmt));
11192 int kind;
11193 switch (TREE_CODE (orig_for_stmt))
11195 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11196 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11197 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11198 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11199 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11200 default:
11201 gcc_unreachable ();
11203 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11204 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11205 for_pre_body);
11206 if (orig_for_stmt != for_stmt)
11207 gimple_omp_for_set_combined_p (gfor, true);
11208 if (gimplify_omp_ctxp
11209 && (gimplify_omp_ctxp->combined_loop
11210 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11211 && gimplify_omp_ctxp->outer_context
11212 && gimplify_omp_ctxp->outer_context->combined_loop)))
11214 gimple_omp_for_set_combined_into_p (gfor, true);
11215 if (gimplify_omp_ctxp->combined_loop)
11216 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11217 else
11218 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11221 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11223 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11224 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11225 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11226 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11227 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11228 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11229 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11230 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11233 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11234 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11235 The outer taskloop stands for computing the number of iterations,
11236 counts for collapsed loops and holding taskloop specific clauses.
11237 The task construct stands for the effect of data sharing on the
11238 explicit task it creates and the inner taskloop stands for expansion
11239 of the static loop inside of the explicit task construct. */
11240 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11242 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11243 tree task_clauses = NULL_TREE;
11244 tree c = *gfor_clauses_ptr;
11245 tree *gtask_clauses_ptr = &task_clauses;
11246 tree outer_for_clauses = NULL_TREE;
11247 tree *gforo_clauses_ptr = &outer_for_clauses;
11248 for (; c; c = OMP_CLAUSE_CHAIN (c))
11249 switch (OMP_CLAUSE_CODE (c))
11251 /* These clauses are allowed on task, move them there. */
11252 case OMP_CLAUSE_SHARED:
11253 case OMP_CLAUSE_FIRSTPRIVATE:
11254 case OMP_CLAUSE_DEFAULT:
11255 case OMP_CLAUSE_IF:
11256 case OMP_CLAUSE_UNTIED:
11257 case OMP_CLAUSE_FINAL:
11258 case OMP_CLAUSE_MERGEABLE:
11259 case OMP_CLAUSE_PRIORITY:
11260 case OMP_CLAUSE_REDUCTION:
11261 case OMP_CLAUSE_IN_REDUCTION:
11262 *gtask_clauses_ptr = c;
11263 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11264 break;
11265 case OMP_CLAUSE_PRIVATE:
11266 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11268 /* We want private on outer for and firstprivate
11269 on task. */
11270 *gtask_clauses_ptr
11271 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11272 OMP_CLAUSE_FIRSTPRIVATE);
11273 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11274 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11275 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11276 *gforo_clauses_ptr = c;
11277 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11279 else
11281 *gtask_clauses_ptr = c;
11282 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11284 break;
11285 /* These clauses go into outer taskloop clauses. */
11286 case OMP_CLAUSE_GRAINSIZE:
11287 case OMP_CLAUSE_NUM_TASKS:
11288 case OMP_CLAUSE_NOGROUP:
11289 *gforo_clauses_ptr = c;
11290 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11291 break;
11292 /* Taskloop clause we duplicate on both taskloops. */
11293 case OMP_CLAUSE_COLLAPSE:
11294 *gfor_clauses_ptr = c;
11295 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11296 *gforo_clauses_ptr = copy_node (c);
11297 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11298 break;
11299 /* For lastprivate, keep the clause on inner taskloop, and add
11300 a shared clause on task. If the same decl is also firstprivate,
11301 add also firstprivate clause on the inner taskloop. */
11302 case OMP_CLAUSE_LASTPRIVATE:
11303 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
11305 /* For taskloop C++ lastprivate IVs, we want:
11306 1) private on outer taskloop
11307 2) firstprivate and shared on task
11308 3) lastprivate on inner taskloop */
11309 *gtask_clauses_ptr
11310 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11311 OMP_CLAUSE_FIRSTPRIVATE);
11312 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11313 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11314 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11315 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11316 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11317 OMP_CLAUSE_PRIVATE);
11318 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11319 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11320 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11321 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11323 *gfor_clauses_ptr = c;
11324 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11325 *gtask_clauses_ptr
11326 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11327 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11328 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11329 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11330 gtask_clauses_ptr
11331 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11332 break;
11333 default:
11334 gcc_unreachable ();
11336 *gfor_clauses_ptr = NULL_TREE;
11337 *gtask_clauses_ptr = NULL_TREE;
11338 *gforo_clauses_ptr = NULL_TREE;
11339 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11340 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11341 NULL_TREE, NULL_TREE, NULL_TREE);
11342 gimple_omp_task_set_taskloop_p (g, true);
11343 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11344 gomp_for *gforo
11345 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11346 gimple_omp_for_collapse (gfor),
11347 gimple_omp_for_pre_body (gfor));
11348 gimple_omp_for_set_pre_body (gfor, NULL);
11349 gimple_omp_for_set_combined_p (gforo, true);
11350 gimple_omp_for_set_combined_into_p (gfor, true);
11351 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11353 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11354 tree v = create_tmp_var (type);
11355 gimple_omp_for_set_index (gforo, i, v);
11356 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11357 gimple_omp_for_set_initial (gforo, i, t);
11358 gimple_omp_for_set_cond (gforo, i,
11359 gimple_omp_for_cond (gfor, i));
11360 t = unshare_expr (gimple_omp_for_final (gfor, i));
11361 gimple_omp_for_set_final (gforo, i, t);
11362 t = unshare_expr (gimple_omp_for_incr (gfor, i));
11363 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11364 TREE_OPERAND (t, 0) = v;
11365 gimple_omp_for_set_incr (gforo, i, t);
11366 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11367 OMP_CLAUSE_DECL (t) = v;
11368 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11369 gimple_omp_for_set_clauses (gforo, t);
11371 gimplify_seq_add_stmt (pre_p, gforo);
11373 else
11374 gimplify_seq_add_stmt (pre_p, gfor);
11375 if (ret != GS_ALL_DONE)
11376 return GS_ERROR;
11377 *expr_p = NULL_TREE;
11378 return GS_ALL_DONE;
11381 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
11382 of OMP_TARGET's body. */
11384 static tree
11385 find_omp_teams (tree *tp, int *walk_subtrees, void *)
11387 *walk_subtrees = 0;
11388 switch (TREE_CODE (*tp))
11390 case OMP_TEAMS:
11391 return *tp;
11392 case BIND_EXPR:
11393 case STATEMENT_LIST:
11394 *walk_subtrees = 1;
11395 break;
11396 default:
11397 break;
11399 return NULL_TREE;
11402 /* Helper function of optimize_target_teams, determine if the expression
11403 can be computed safely before the target construct on the host. */
11405 static tree
11406 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
11408 splay_tree_node n;
11410 if (TYPE_P (*tp))
11412 *walk_subtrees = 0;
11413 return NULL_TREE;
11415 switch (TREE_CODE (*tp))
11417 case VAR_DECL:
11418 case PARM_DECL:
11419 case RESULT_DECL:
11420 *walk_subtrees = 0;
11421 if (error_operand_p (*tp)
11422 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
11423 || DECL_HAS_VALUE_EXPR_P (*tp)
11424 || DECL_THREAD_LOCAL_P (*tp)
11425 || TREE_SIDE_EFFECTS (*tp)
11426 || TREE_THIS_VOLATILE (*tp))
11427 return *tp;
11428 if (is_global_var (*tp)
11429 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
11430 || lookup_attribute ("omp declare target link",
11431 DECL_ATTRIBUTES (*tp))))
11432 return *tp;
11433 if (VAR_P (*tp)
11434 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
11435 && !is_global_var (*tp)
11436 && decl_function_context (*tp) == current_function_decl)
11437 return *tp;
11438 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11439 (splay_tree_key) *tp);
11440 if (n == NULL)
11442 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
11443 return NULL_TREE;
11444 return *tp;
11446 else if (n->value & GOVD_LOCAL)
11447 return *tp;
11448 else if (n->value & GOVD_FIRSTPRIVATE)
11449 return NULL_TREE;
11450 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11451 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11452 return NULL_TREE;
11453 return *tp;
11454 case INTEGER_CST:
11455 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11456 return *tp;
11457 return NULL_TREE;
11458 case TARGET_EXPR:
11459 if (TARGET_EXPR_INITIAL (*tp)
11460 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
11461 return *tp;
11462 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
11463 walk_subtrees, NULL);
11464 /* Allow some reasonable subset of integral arithmetics. */
11465 case PLUS_EXPR:
11466 case MINUS_EXPR:
11467 case MULT_EXPR:
11468 case TRUNC_DIV_EXPR:
11469 case CEIL_DIV_EXPR:
11470 case FLOOR_DIV_EXPR:
11471 case ROUND_DIV_EXPR:
11472 case TRUNC_MOD_EXPR:
11473 case CEIL_MOD_EXPR:
11474 case FLOOR_MOD_EXPR:
11475 case ROUND_MOD_EXPR:
11476 case RDIV_EXPR:
11477 case EXACT_DIV_EXPR:
11478 case MIN_EXPR:
11479 case MAX_EXPR:
11480 case LSHIFT_EXPR:
11481 case RSHIFT_EXPR:
11482 case BIT_IOR_EXPR:
11483 case BIT_XOR_EXPR:
11484 case BIT_AND_EXPR:
11485 case NEGATE_EXPR:
11486 case ABS_EXPR:
11487 case BIT_NOT_EXPR:
11488 case NON_LVALUE_EXPR:
11489 CASE_CONVERT:
11490 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11491 return *tp;
11492 return NULL_TREE;
11493 /* And disallow anything else, except for comparisons. */
11494 default:
11495 if (COMPARISON_CLASS_P (*tp))
11496 return NULL_TREE;
11497 return *tp;
11501 /* Try to determine if the num_teams and/or thread_limit expressions
11502 can have their values determined already before entering the
11503 target construct.
11504 INTEGER_CSTs trivially are,
11505 integral decls that are firstprivate (explicitly or implicitly)
11506 or explicitly map(always, to:) or map(always, tofrom:) on the target
11507 region too, and expressions involving simple arithmetics on those
11508 too, function calls are not ok, dereferencing something neither etc.
11509 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11510 EXPR based on what we find:
11511 0 stands for clause not specified at all, use implementation default
11512 -1 stands for value that can't be determined easily before entering
11513 the target construct.
11514 If teams construct is not present at all, use 1 for num_teams
11515 and 0 for thread_limit (only one team is involved, and the thread
11516 limit is implementation defined. */
11518 static void
11519 optimize_target_teams (tree target, gimple_seq *pre_p)
11521 tree body = OMP_BODY (target);
11522 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
11523 tree num_teams = integer_zero_node;
11524 tree thread_limit = integer_zero_node;
11525 location_t num_teams_loc = EXPR_LOCATION (target);
11526 location_t thread_limit_loc = EXPR_LOCATION (target);
11527 tree c, *p, expr;
11528 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
11530 if (teams == NULL_TREE)
11531 num_teams = integer_one_node;
11532 else
11533 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
11535 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
11537 p = &num_teams;
11538 num_teams_loc = OMP_CLAUSE_LOCATION (c);
11540 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
11542 p = &thread_limit;
11543 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
11545 else
11546 continue;
11547 expr = OMP_CLAUSE_OPERAND (c, 0);
11548 if (TREE_CODE (expr) == INTEGER_CST)
11550 *p = expr;
11551 continue;
11553 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
11555 *p = integer_minus_one_node;
11556 continue;
11558 *p = expr;
11559 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
11560 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
11561 == GS_ERROR)
11563 gimplify_omp_ctxp = target_ctx;
11564 *p = integer_minus_one_node;
11565 continue;
11567 gimplify_omp_ctxp = target_ctx;
11568 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
11569 OMP_CLAUSE_OPERAND (c, 0) = *p;
11571 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
11572 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
11573 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11574 OMP_TARGET_CLAUSES (target) = c;
11575 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
11576 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
11577 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11578 OMP_TARGET_CLAUSES (target) = c;
11581 /* Gimplify the gross structure of several OMP constructs. */
11583 static void
11584 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
11586 tree expr = *expr_p;
11587 gimple *stmt;
11588 gimple_seq body = NULL;
11589 enum omp_region_type ort;
11591 switch (TREE_CODE (expr))
11593 case OMP_SECTIONS:
11594 case OMP_SINGLE:
11595 ort = ORT_WORKSHARE;
11596 break;
11597 case OMP_TARGET:
11598 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
11599 break;
11600 case OACC_KERNELS:
11601 ort = ORT_ACC_KERNELS;
11602 break;
11603 case OACC_PARALLEL:
11604 ort = ORT_ACC_PARALLEL;
11605 break;
11606 case OACC_DATA:
11607 ort = ORT_ACC_DATA;
11608 break;
11609 case OMP_TARGET_DATA:
11610 ort = ORT_TARGET_DATA;
11611 break;
11612 case OMP_TEAMS:
11613 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
11614 if (gimplify_omp_ctxp == NULL
11615 || (gimplify_omp_ctxp->region_type == ORT_TARGET
11616 && gimplify_omp_ctxp->outer_context == NULL
11617 && lookup_attribute ("omp declare target",
11618 DECL_ATTRIBUTES (current_function_decl))))
11619 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
11620 break;
11621 case OACC_HOST_DATA:
11622 ort = ORT_ACC_HOST_DATA;
11623 break;
11624 default:
11625 gcc_unreachable ();
11627 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
11628 TREE_CODE (expr));
11629 if (TREE_CODE (expr) == OMP_TARGET)
11630 optimize_target_teams (expr, pre_p);
11631 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
11632 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11634 push_gimplify_context ();
11635 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
11636 if (gimple_code (g) == GIMPLE_BIND)
11637 pop_gimplify_context (g);
11638 else
11639 pop_gimplify_context (NULL);
11640 if ((ort & ORT_TARGET_DATA) != 0)
11642 enum built_in_function end_ix;
11643 switch (TREE_CODE (expr))
11645 case OACC_DATA:
11646 case OACC_HOST_DATA:
11647 end_ix = BUILT_IN_GOACC_DATA_END;
11648 break;
11649 case OMP_TARGET_DATA:
11650 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
11651 break;
11652 default:
11653 gcc_unreachable ();
11655 tree fn = builtin_decl_explicit (end_ix);
11656 g = gimple_build_call (fn, 0);
11657 gimple_seq cleanup = NULL;
11658 gimple_seq_add_stmt (&cleanup, g);
11659 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11660 body = NULL;
11661 gimple_seq_add_stmt (&body, g);
11664 else
11665 gimplify_and_add (OMP_BODY (expr), &body);
11666 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
11667 TREE_CODE (expr));
11669 switch (TREE_CODE (expr))
11671 case OACC_DATA:
11672 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
11673 OMP_CLAUSES (expr));
11674 break;
11675 case OACC_KERNELS:
11676 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
11677 OMP_CLAUSES (expr));
11678 break;
11679 case OACC_HOST_DATA:
11680 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
11681 OMP_CLAUSES (expr));
11682 break;
11683 case OACC_PARALLEL:
11684 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
11685 OMP_CLAUSES (expr));
11686 break;
11687 case OMP_SECTIONS:
11688 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
11689 break;
11690 case OMP_SINGLE:
11691 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
11692 break;
11693 case OMP_TARGET:
11694 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
11695 OMP_CLAUSES (expr));
11696 break;
11697 case OMP_TARGET_DATA:
11698 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
11699 OMP_CLAUSES (expr));
11700 break;
11701 case OMP_TEAMS:
11702 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
11703 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11704 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
11705 break;
11706 default:
11707 gcc_unreachable ();
11710 gimplify_seq_add_stmt (pre_p, stmt);
11711 *expr_p = NULL_TREE;
11714 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
11715 target update constructs. */
11717 static void
11718 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
11720 tree expr = *expr_p;
11721 int kind;
11722 gomp_target *stmt;
11723 enum omp_region_type ort = ORT_WORKSHARE;
11725 switch (TREE_CODE (expr))
11727 case OACC_ENTER_DATA:
11728 case OACC_EXIT_DATA:
11729 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
11730 ort = ORT_ACC;
11731 break;
11732 case OACC_UPDATE:
11733 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
11734 ort = ORT_ACC;
11735 break;
11736 case OMP_TARGET_UPDATE:
11737 kind = GF_OMP_TARGET_KIND_UPDATE;
11738 break;
11739 case OMP_TARGET_ENTER_DATA:
11740 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
11741 break;
11742 case OMP_TARGET_EXIT_DATA:
11743 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
11744 break;
11745 default:
11746 gcc_unreachable ();
11748 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
11749 ort, TREE_CODE (expr));
11750 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
11751 TREE_CODE (expr));
11752 if (TREE_CODE (expr) == OACC_UPDATE
11753 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11754 OMP_CLAUSE_IF_PRESENT))
11756 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
11757 clause. */
11758 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11760 switch (OMP_CLAUSE_MAP_KIND (c))
11762 case GOMP_MAP_FORCE_TO:
11763 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
11764 break;
11765 case GOMP_MAP_FORCE_FROM:
11766 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
11767 break;
11768 default:
11769 break;
11772 else if (TREE_CODE (expr) == OACC_EXIT_DATA
11773 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11774 OMP_CLAUSE_FINALIZE))
11776 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
11777 semantics apply to all mappings of this OpenACC directive. */
11778 bool finalize_marked = false;
11779 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11780 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11781 switch (OMP_CLAUSE_MAP_KIND (c))
11783 case GOMP_MAP_FROM:
11784 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
11785 finalize_marked = true;
11786 break;
11787 case GOMP_MAP_RELEASE:
11788 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
11789 finalize_marked = true;
11790 break;
11791 default:
11792 /* Check consistency: libgomp relies on the very first data
11793 mapping clause being marked, so make sure we did that before
11794 any other mapping clauses. */
11795 gcc_assert (finalize_marked);
11796 break;
11799 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
11801 gimplify_seq_add_stmt (pre_p, stmt);
11802 *expr_p = NULL_TREE;
11805 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
11806 stabilized the lhs of the atomic operation as *ADDR. Return true if
11807 EXPR is this stabilized form. */
11809 static bool
11810 goa_lhs_expr_p (tree expr, tree addr)
11812 /* Also include casts to other type variants. The C front end is fond
11813 of adding these for e.g. volatile variables. This is like
11814 STRIP_TYPE_NOPS but includes the main variant lookup. */
11815 STRIP_USELESS_TYPE_CONVERSION (expr);
11817 if (TREE_CODE (expr) == INDIRECT_REF)
11819 expr = TREE_OPERAND (expr, 0);
11820 while (expr != addr
11821 && (CONVERT_EXPR_P (expr)
11822 || TREE_CODE (expr) == NON_LVALUE_EXPR)
11823 && TREE_CODE (expr) == TREE_CODE (addr)
11824 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
11826 expr = TREE_OPERAND (expr, 0);
11827 addr = TREE_OPERAND (addr, 0);
11829 if (expr == addr)
11830 return true;
11831 return (TREE_CODE (addr) == ADDR_EXPR
11832 && TREE_CODE (expr) == ADDR_EXPR
11833 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
11835 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
11836 return true;
11837 return false;
11840 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
11841 expression does not involve the lhs, evaluate it into a temporary.
11842 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
11843 or -1 if an error was encountered. */
11845 static int
11846 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
11847 tree lhs_var)
11849 tree expr = *expr_p;
11850 int saw_lhs;
11852 if (goa_lhs_expr_p (expr, lhs_addr))
11854 *expr_p = lhs_var;
11855 return 1;
11857 if (is_gimple_val (expr))
11858 return 0;
11860 saw_lhs = 0;
11861 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
11863 case tcc_binary:
11864 case tcc_comparison:
11865 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
11866 lhs_var);
11867 /* FALLTHRU */
11868 case tcc_unary:
11869 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
11870 lhs_var);
11871 break;
11872 case tcc_expression:
11873 switch (TREE_CODE (expr))
11875 case TRUTH_ANDIF_EXPR:
11876 case TRUTH_ORIF_EXPR:
11877 case TRUTH_AND_EXPR:
11878 case TRUTH_OR_EXPR:
11879 case TRUTH_XOR_EXPR:
11880 case BIT_INSERT_EXPR:
11881 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
11882 lhs_addr, lhs_var);
11883 /* FALLTHRU */
11884 case TRUTH_NOT_EXPR:
11885 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11886 lhs_addr, lhs_var);
11887 break;
11888 case COMPOUND_EXPR:
11889 /* Break out any preevaluations from cp_build_modify_expr. */
11890 for (; TREE_CODE (expr) == COMPOUND_EXPR;
11891 expr = TREE_OPERAND (expr, 1))
11892 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
11893 *expr_p = expr;
11894 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
11895 default:
11896 break;
11898 break;
11899 case tcc_reference:
11900 if (TREE_CODE (expr) == BIT_FIELD_REF)
11901 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11902 lhs_addr, lhs_var);
11903 break;
11904 default:
11905 break;
11908 if (saw_lhs == 0)
11910 enum gimplify_status gs;
11911 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
11912 if (gs != GS_ALL_DONE)
11913 saw_lhs = -1;
11916 return saw_lhs;
11919 /* Gimplify an OMP_ATOMIC statement. */
11921 static enum gimplify_status
11922 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
11924 tree addr = TREE_OPERAND (*expr_p, 0);
11925 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
11926 ? NULL : TREE_OPERAND (*expr_p, 1);
11927 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
11928 tree tmp_load;
11929 gomp_atomic_load *loadstmt;
11930 gomp_atomic_store *storestmt;
11932 tmp_load = create_tmp_reg (type);
11933 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
11934 return GS_ERROR;
11936 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
11937 != GS_ALL_DONE)
11938 return GS_ERROR;
11940 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
11941 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
11942 gimplify_seq_add_stmt (pre_p, loadstmt);
11943 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
11944 != GS_ALL_DONE)
11945 return GS_ERROR;
11947 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
11948 rhs = tmp_load;
11949 storestmt
11950 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
11951 gimplify_seq_add_stmt (pre_p, storestmt);
11952 switch (TREE_CODE (*expr_p))
11954 case OMP_ATOMIC_READ:
11955 case OMP_ATOMIC_CAPTURE_OLD:
11956 *expr_p = tmp_load;
11957 gimple_omp_atomic_set_need_value (loadstmt);
11958 break;
11959 case OMP_ATOMIC_CAPTURE_NEW:
11960 *expr_p = rhs;
11961 gimple_omp_atomic_set_need_value (storestmt);
11962 break;
11963 default:
11964 *expr_p = NULL;
11965 break;
11968 return GS_ALL_DONE;
11971 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
11972 body, and adding some EH bits. */
11974 static enum gimplify_status
11975 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
11977 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
11978 gimple *body_stmt;
11979 gtransaction *trans_stmt;
11980 gimple_seq body = NULL;
11981 int subcode = 0;
11983 /* Wrap the transaction body in a BIND_EXPR so we have a context
11984 where to put decls for OMP. */
11985 if (TREE_CODE (tbody) != BIND_EXPR)
11987 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
11988 TREE_SIDE_EFFECTS (bind) = 1;
11989 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
11990 TRANSACTION_EXPR_BODY (expr) = bind;
11993 push_gimplify_context ();
11994 temp = voidify_wrapper_expr (*expr_p, NULL);
11996 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
11997 pop_gimplify_context (body_stmt);
11999 trans_stmt = gimple_build_transaction (body);
12000 if (TRANSACTION_EXPR_OUTER (expr))
12001 subcode = GTMA_IS_OUTER;
12002 else if (TRANSACTION_EXPR_RELAXED (expr))
12003 subcode = GTMA_IS_RELAXED;
12004 gimple_transaction_set_subcode (trans_stmt, subcode);
12006 gimplify_seq_add_stmt (pre_p, trans_stmt);
12008 if (temp)
12010 *expr_p = temp;
12011 return GS_OK;
12014 *expr_p = NULL_TREE;
12015 return GS_ALL_DONE;
12018 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12019 is the OMP_BODY of the original EXPR (which has already been
12020 gimplified so it's not present in the EXPR).
12022 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12024 static gimple *
12025 gimplify_omp_ordered (tree expr, gimple_seq body)
12027 tree c, decls;
12028 int failures = 0;
12029 unsigned int i;
12030 tree source_c = NULL_TREE;
12031 tree sink_c = NULL_TREE;
12033 if (gimplify_omp_ctxp)
12035 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12036 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12037 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
12038 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
12039 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
12041 error_at (OMP_CLAUSE_LOCATION (c),
12042 "%<ordered%> construct with %<depend%> clause must be "
12043 "closely nested inside a loop with %<ordered%> clause "
12044 "with a parameter");
12045 failures++;
12047 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12048 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
12050 bool fail = false;
12051 for (decls = OMP_CLAUSE_DECL (c), i = 0;
12052 decls && TREE_CODE (decls) == TREE_LIST;
12053 decls = TREE_CHAIN (decls), ++i)
12054 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
12055 continue;
12056 else if (TREE_VALUE (decls)
12057 != gimplify_omp_ctxp->loop_iter_var[2 * i])
12059 error_at (OMP_CLAUSE_LOCATION (c),
12060 "variable %qE is not an iteration "
12061 "of outermost loop %d, expected %qE",
12062 TREE_VALUE (decls), i + 1,
12063 gimplify_omp_ctxp->loop_iter_var[2 * i]);
12064 fail = true;
12065 failures++;
12067 else
12068 TREE_VALUE (decls)
12069 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
12070 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
12072 error_at (OMP_CLAUSE_LOCATION (c),
12073 "number of variables in %<depend(sink)%> "
12074 "clause does not match number of "
12075 "iteration variables");
12076 failures++;
12078 sink_c = c;
12080 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12081 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
12083 if (source_c)
12085 error_at (OMP_CLAUSE_LOCATION (c),
12086 "more than one %<depend(source)%> clause on an "
12087 "%<ordered%> construct");
12088 failures++;
12090 else
12091 source_c = c;
12094 if (source_c && sink_c)
12096 error_at (OMP_CLAUSE_LOCATION (source_c),
12097 "%<depend(source)%> clause specified together with "
12098 "%<depend(sink:)%> clauses on the same construct");
12099 failures++;
12102 if (failures)
12103 return gimple_build_nop ();
12104 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
12107 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12108 expression produces a value to be used as an operand inside a GIMPLE
12109 statement, the value will be stored back in *EXPR_P. This value will
12110 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12111 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12112 emitted in PRE_P and POST_P.
12114 Additionally, this process may overwrite parts of the input
12115 expression during gimplification. Ideally, it should be
12116 possible to do non-destructive gimplification.
12118 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12119 the expression needs to evaluate to a value to be used as
12120 an operand in a GIMPLE statement, this value will be stored in
12121 *EXPR_P on exit. This happens when the caller specifies one
12122 of fb_lvalue or fb_rvalue fallback flags.
12124 PRE_P will contain the sequence of GIMPLE statements corresponding
12125 to the evaluation of EXPR and all the side-effects that must
12126 be executed before the main expression. On exit, the last
12127 statement of PRE_P is the core statement being gimplified. For
12128 instance, when gimplifying 'if (++a)' the last statement in
12129 PRE_P will be 'if (t.1)' where t.1 is the result of
12130 pre-incrementing 'a'.
12132 POST_P will contain the sequence of GIMPLE statements corresponding
12133 to the evaluation of all the side-effects that must be executed
12134 after the main expression. If this is NULL, the post
12135 side-effects are stored at the end of PRE_P.
12137 The reason why the output is split in two is to handle post
12138 side-effects explicitly. In some cases, an expression may have
12139 inner and outer post side-effects which need to be emitted in
12140 an order different from the one given by the recursive
12141 traversal. For instance, for the expression (*p--)++ the post
12142 side-effects of '--' must actually occur *after* the post
12143 side-effects of '++'. However, gimplification will first visit
12144 the inner expression, so if a separate POST sequence was not
12145 used, the resulting sequence would be:
12147 1 t.1 = *p
12148 2 p = p - 1
12149 3 t.2 = t.1 + 1
12150 4 *p = t.2
12152 However, the post-decrement operation in line #2 must not be
12153 evaluated until after the store to *p at line #4, so the
12154 correct sequence should be:
12156 1 t.1 = *p
12157 2 t.2 = t.1 + 1
12158 3 *p = t.2
12159 4 p = p - 1
12161 So, by specifying a separate post queue, it is possible
12162 to emit the post side-effects in the correct order.
12163 If POST_P is NULL, an internal queue will be used. Before
12164 returning to the caller, the sequence POST_P is appended to
12165 the main output sequence PRE_P.
12167 GIMPLE_TEST_F points to a function that takes a tree T and
12168 returns nonzero if T is in the GIMPLE form requested by the
12169 caller. The GIMPLE predicates are in gimple.c.
12171 FALLBACK tells the function what sort of a temporary we want if
12172 gimplification cannot produce an expression that complies with
12173 GIMPLE_TEST_F.
12175 fb_none means that no temporary should be generated
12176 fb_rvalue means that an rvalue is OK to generate
12177 fb_lvalue means that an lvalue is OK to generate
12178 fb_either means that either is OK, but an lvalue is preferable.
12179 fb_mayfail means that gimplification may fail (in which case
12180 GS_ERROR will be returned)
12182 The return value is either GS_ERROR or GS_ALL_DONE, since this
12183 function iterates until EXPR is completely gimplified or an error
12184 occurs. */
12186 enum gimplify_status
12187 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12188 bool (*gimple_test_f) (tree), fallback_t fallback)
12190 tree tmp;
12191 gimple_seq internal_pre = NULL;
12192 gimple_seq internal_post = NULL;
12193 tree save_expr;
12194 bool is_statement;
12195 location_t saved_location;
12196 enum gimplify_status ret;
12197 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
12198 tree label;
12200 save_expr = *expr_p;
12201 if (save_expr == NULL_TREE)
12202 return GS_ALL_DONE;
12204 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12205 is_statement = gimple_test_f == is_gimple_stmt;
12206 if (is_statement)
12207 gcc_assert (pre_p);
12209 /* Consistency checks. */
12210 if (gimple_test_f == is_gimple_reg)
12211 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12212 else if (gimple_test_f == is_gimple_val
12213 || gimple_test_f == is_gimple_call_addr
12214 || gimple_test_f == is_gimple_condexpr
12215 || gimple_test_f == is_gimple_mem_rhs
12216 || gimple_test_f == is_gimple_mem_rhs_or_call
12217 || gimple_test_f == is_gimple_reg_rhs
12218 || gimple_test_f == is_gimple_reg_rhs_or_call
12219 || gimple_test_f == is_gimple_asm_val
12220 || gimple_test_f == is_gimple_mem_ref_addr)
12221 gcc_assert (fallback & fb_rvalue);
12222 else if (gimple_test_f == is_gimple_min_lval
12223 || gimple_test_f == is_gimple_lvalue)
12224 gcc_assert (fallback & fb_lvalue);
12225 else if (gimple_test_f == is_gimple_addressable)
12226 gcc_assert (fallback & fb_either);
12227 else if (gimple_test_f == is_gimple_stmt)
12228 gcc_assert (fallback == fb_none);
12229 else
12231 /* We should have recognized the GIMPLE_TEST_F predicate to
12232 know what kind of fallback to use in case a temporary is
12233 needed to hold the value or address of *EXPR_P. */
12234 gcc_unreachable ();
12237 /* We used to check the predicate here and return immediately if it
12238 succeeds. This is wrong; the design is for gimplification to be
12239 idempotent, and for the predicates to only test for valid forms, not
12240 whether they are fully simplified. */
12241 if (pre_p == NULL)
12242 pre_p = &internal_pre;
12244 if (post_p == NULL)
12245 post_p = &internal_post;
12247 /* Remember the last statements added to PRE_P and POST_P. Every
12248 new statement added by the gimplification helpers needs to be
12249 annotated with location information. To centralize the
12250 responsibility, we remember the last statement that had been
12251 added to both queues before gimplifying *EXPR_P. If
12252 gimplification produces new statements in PRE_P and POST_P, those
12253 statements will be annotated with the same location information
12254 as *EXPR_P. */
12255 pre_last_gsi = gsi_last (*pre_p);
12256 post_last_gsi = gsi_last (*post_p);
12258 saved_location = input_location;
12259 if (save_expr != error_mark_node
12260 && EXPR_HAS_LOCATION (*expr_p))
12261 input_location = EXPR_LOCATION (*expr_p);
12263 /* Loop over the specific gimplifiers until the toplevel node
12264 remains the same. */
12267 /* Strip away as many useless type conversions as possible
12268 at the toplevel. */
12269 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
12271 /* Remember the expr. */
12272 save_expr = *expr_p;
12274 /* Die, die, die, my darling. */
12275 if (error_operand_p (save_expr))
12277 ret = GS_ERROR;
12278 break;
12281 /* Do any language-specific gimplification. */
12282 ret = ((enum gimplify_status)
12283 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
12284 if (ret == GS_OK)
12286 if (*expr_p == NULL_TREE)
12287 break;
12288 if (*expr_p != save_expr)
12289 continue;
12291 else if (ret != GS_UNHANDLED)
12292 break;
12294 /* Make sure that all the cases set 'ret' appropriately. */
12295 ret = GS_UNHANDLED;
12296 switch (TREE_CODE (*expr_p))
12298 /* First deal with the special cases. */
12300 case POSTINCREMENT_EXPR:
12301 case POSTDECREMENT_EXPR:
12302 case PREINCREMENT_EXPR:
12303 case PREDECREMENT_EXPR:
12304 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
12305 fallback != fb_none,
12306 TREE_TYPE (*expr_p));
12307 break;
12309 case VIEW_CONVERT_EXPR:
12310 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
12311 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
12313 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12314 post_p, is_gimple_val, fb_rvalue);
12315 recalculate_side_effects (*expr_p);
12316 break;
12318 /* Fallthru. */
12320 case ARRAY_REF:
12321 case ARRAY_RANGE_REF:
12322 case REALPART_EXPR:
12323 case IMAGPART_EXPR:
12324 case COMPONENT_REF:
12325 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
12326 fallback ? fallback : fb_rvalue);
12327 break;
12329 case COND_EXPR:
12330 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
12332 /* C99 code may assign to an array in a structure value of a
12333 conditional expression, and this has undefined behavior
12334 only on execution, so create a temporary if an lvalue is
12335 required. */
12336 if (fallback == fb_lvalue)
12338 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12339 mark_addressable (*expr_p);
12340 ret = GS_OK;
12342 break;
12344 case CALL_EXPR:
12345 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
12347 /* C99 code may assign to an array in a structure returned
12348 from a function, and this has undefined behavior only on
12349 execution, so create a temporary if an lvalue is
12350 required. */
12351 if (fallback == fb_lvalue)
12353 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12354 mark_addressable (*expr_p);
12355 ret = GS_OK;
12357 break;
12359 case TREE_LIST:
12360 gcc_unreachable ();
12362 case COMPOUND_EXPR:
12363 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
12364 break;
12366 case COMPOUND_LITERAL_EXPR:
12367 ret = gimplify_compound_literal_expr (expr_p, pre_p,
12368 gimple_test_f, fallback);
12369 break;
12371 case MODIFY_EXPR:
12372 case INIT_EXPR:
12373 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
12374 fallback != fb_none);
12375 break;
12377 case TRUTH_ANDIF_EXPR:
12378 case TRUTH_ORIF_EXPR:
12380 /* Preserve the original type of the expression and the
12381 source location of the outer expression. */
12382 tree org_type = TREE_TYPE (*expr_p);
12383 *expr_p = gimple_boolify (*expr_p);
12384 *expr_p = build3_loc (input_location, COND_EXPR,
12385 org_type, *expr_p,
12386 fold_convert_loc
12387 (input_location,
12388 org_type, boolean_true_node),
12389 fold_convert_loc
12390 (input_location,
12391 org_type, boolean_false_node));
12392 ret = GS_OK;
12393 break;
12396 case TRUTH_NOT_EXPR:
12398 tree type = TREE_TYPE (*expr_p);
12399 /* The parsers are careful to generate TRUTH_NOT_EXPR
12400 only with operands that are always zero or one.
12401 We do not fold here but handle the only interesting case
12402 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
12403 *expr_p = gimple_boolify (*expr_p);
12404 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
12405 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
12406 TREE_TYPE (*expr_p),
12407 TREE_OPERAND (*expr_p, 0));
12408 else
12409 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
12410 TREE_TYPE (*expr_p),
12411 TREE_OPERAND (*expr_p, 0),
12412 build_int_cst (TREE_TYPE (*expr_p), 1));
12413 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
12414 *expr_p = fold_convert_loc (input_location, type, *expr_p);
12415 ret = GS_OK;
12416 break;
12419 case ADDR_EXPR:
12420 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
12421 break;
12423 case ANNOTATE_EXPR:
12425 tree cond = TREE_OPERAND (*expr_p, 0);
12426 tree kind = TREE_OPERAND (*expr_p, 1);
12427 tree data = TREE_OPERAND (*expr_p, 2);
12428 tree type = TREE_TYPE (cond);
12429 if (!INTEGRAL_TYPE_P (type))
12431 *expr_p = cond;
12432 ret = GS_OK;
12433 break;
12435 tree tmp = create_tmp_var (type);
12436 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
12437 gcall *call
12438 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
12439 gimple_call_set_lhs (call, tmp);
12440 gimplify_seq_add_stmt (pre_p, call);
12441 *expr_p = tmp;
12442 ret = GS_ALL_DONE;
12443 break;
12446 case VA_ARG_EXPR:
12447 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
12448 break;
12450 CASE_CONVERT:
12451 if (IS_EMPTY_STMT (*expr_p))
12453 ret = GS_ALL_DONE;
12454 break;
12457 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
12458 || fallback == fb_none)
12460 /* Just strip a conversion to void (or in void context) and
12461 try again. */
12462 *expr_p = TREE_OPERAND (*expr_p, 0);
12463 ret = GS_OK;
12464 break;
12467 ret = gimplify_conversion (expr_p);
12468 if (ret == GS_ERROR)
12469 break;
12470 if (*expr_p != save_expr)
12471 break;
12472 /* FALLTHRU */
12474 case FIX_TRUNC_EXPR:
12475 /* unary_expr: ... | '(' cast ')' val | ... */
12476 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12477 is_gimple_val, fb_rvalue);
12478 recalculate_side_effects (*expr_p);
12479 break;
12481 case INDIRECT_REF:
12483 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
12484 bool notrap = TREE_THIS_NOTRAP (*expr_p);
12485 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12487 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12488 if (*expr_p != save_expr)
12490 ret = GS_OK;
12491 break;
12494 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12495 is_gimple_reg, fb_rvalue);
12496 if (ret == GS_ERROR)
12497 break;
12499 recalculate_side_effects (*expr_p);
12500 *expr_p = fold_build2_loc (input_location, MEM_REF,
12501 TREE_TYPE (*expr_p),
12502 TREE_OPERAND (*expr_p, 0),
12503 build_int_cst (saved_ptr_type, 0));
12504 TREE_THIS_VOLATILE (*expr_p) = volatilep;
12505 TREE_THIS_NOTRAP (*expr_p) = notrap;
12506 ret = GS_OK;
12507 break;
12510 /* We arrive here through the various re-gimplifcation paths. */
12511 case MEM_REF:
12512 /* First try re-folding the whole thing. */
12513 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12514 TREE_OPERAND (*expr_p, 0),
12515 TREE_OPERAND (*expr_p, 1));
12516 if (tmp)
12518 REF_REVERSE_STORAGE_ORDER (tmp)
12519 = REF_REVERSE_STORAGE_ORDER (*expr_p);
12520 *expr_p = tmp;
12521 recalculate_side_effects (*expr_p);
12522 ret = GS_OK;
12523 break;
12525 /* Avoid re-gimplifying the address operand if it is already
12526 in suitable form. Re-gimplifying would mark the address
12527 operand addressable. Always gimplify when not in SSA form
12528 as we still may have to gimplify decls with value-exprs. */
12529 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
12530 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12532 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12533 is_gimple_mem_ref_addr, fb_rvalue);
12534 if (ret == GS_ERROR)
12535 break;
12537 recalculate_side_effects (*expr_p);
12538 ret = GS_ALL_DONE;
12539 break;
12541 /* Constants need not be gimplified. */
12542 case INTEGER_CST:
12543 case REAL_CST:
12544 case FIXED_CST:
12545 case STRING_CST:
12546 case COMPLEX_CST:
12547 case VECTOR_CST:
12548 /* Drop the overflow flag on constants, we do not want
12549 that in the GIMPLE IL. */
12550 if (TREE_OVERFLOW_P (*expr_p))
12551 *expr_p = drop_tree_overflow (*expr_p);
12552 ret = GS_ALL_DONE;
12553 break;
12555 case CONST_DECL:
12556 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12557 CONST_DECL node. Otherwise the decl is replaceable by its
12558 value. */
12559 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12560 if (fallback & fb_lvalue)
12561 ret = GS_ALL_DONE;
12562 else
12564 *expr_p = DECL_INITIAL (*expr_p);
12565 ret = GS_OK;
12567 break;
12569 case DECL_EXPR:
12570 ret = gimplify_decl_expr (expr_p, pre_p);
12571 break;
12573 case BIND_EXPR:
12574 ret = gimplify_bind_expr (expr_p, pre_p);
12575 break;
12577 case LOOP_EXPR:
12578 ret = gimplify_loop_expr (expr_p, pre_p);
12579 break;
12581 case SWITCH_EXPR:
12582 ret = gimplify_switch_expr (expr_p, pre_p);
12583 break;
12585 case EXIT_EXPR:
12586 ret = gimplify_exit_expr (expr_p);
12587 break;
12589 case GOTO_EXPR:
12590 /* If the target is not LABEL, then it is a computed jump
12591 and the target needs to be gimplified. */
12592 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
12594 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12595 NULL, is_gimple_val, fb_rvalue);
12596 if (ret == GS_ERROR)
12597 break;
12599 gimplify_seq_add_stmt (pre_p,
12600 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
12601 ret = GS_ALL_DONE;
12602 break;
12604 case PREDICT_EXPR:
12605 gimplify_seq_add_stmt (pre_p,
12606 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12607 PREDICT_EXPR_OUTCOME (*expr_p)));
12608 ret = GS_ALL_DONE;
12609 break;
12611 case LABEL_EXPR:
12612 ret = gimplify_label_expr (expr_p, pre_p);
12613 label = LABEL_EXPR_LABEL (*expr_p);
12614 gcc_assert (decl_function_context (label) == current_function_decl);
12616 /* If the label is used in a goto statement, or address of the label
12617 is taken, we need to unpoison all variables that were seen so far.
12618 Doing so would prevent us from reporting a false positives. */
12619 if (asan_poisoned_variables
12620 && asan_used_labels != NULL
12621 && asan_used_labels->contains (label))
12622 asan_poison_variables (asan_poisoned_variables, false, pre_p);
12623 break;
12625 case CASE_LABEL_EXPR:
12626 ret = gimplify_case_label_expr (expr_p, pre_p);
12628 if (gimplify_ctxp->live_switch_vars)
12629 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12630 pre_p);
12631 break;
12633 case RETURN_EXPR:
12634 ret = gimplify_return_expr (*expr_p, pre_p);
12635 break;
12637 case CONSTRUCTOR:
12638 /* Don't reduce this in place; let gimplify_init_constructor work its
12639 magic. Buf if we're just elaborating this for side effects, just
12640 gimplify any element that has side-effects. */
12641 if (fallback == fb_none)
12643 unsigned HOST_WIDE_INT ix;
12644 tree val;
12645 tree temp = NULL_TREE;
12646 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12647 if (TREE_SIDE_EFFECTS (val))
12648 append_to_statement_list (val, &temp);
12650 *expr_p = temp;
12651 ret = temp ? GS_OK : GS_ALL_DONE;
12653 /* C99 code may assign to an array in a constructed
12654 structure or union, and this has undefined behavior only
12655 on execution, so create a temporary if an lvalue is
12656 required. */
12657 else if (fallback == fb_lvalue)
12659 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12660 mark_addressable (*expr_p);
12661 ret = GS_OK;
12663 else
12664 ret = GS_ALL_DONE;
12665 break;
12667 /* The following are special cases that are not handled by the
12668 original GIMPLE grammar. */
12670 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
12671 eliminated. */
12672 case SAVE_EXPR:
12673 ret = gimplify_save_expr (expr_p, pre_p, post_p);
12674 break;
12676 case BIT_FIELD_REF:
12677 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12678 post_p, is_gimple_lvalue, fb_either);
12679 recalculate_side_effects (*expr_p);
12680 break;
12682 case TARGET_MEM_REF:
12684 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
12686 if (TMR_BASE (*expr_p))
12687 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
12688 post_p, is_gimple_mem_ref_addr, fb_either);
12689 if (TMR_INDEX (*expr_p))
12690 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
12691 post_p, is_gimple_val, fb_rvalue);
12692 if (TMR_INDEX2 (*expr_p))
12693 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
12694 post_p, is_gimple_val, fb_rvalue);
12695 /* TMR_STEP and TMR_OFFSET are always integer constants. */
12696 ret = MIN (r0, r1);
12698 break;
12700 case NON_LVALUE_EXPR:
12701 /* This should have been stripped above. */
12702 gcc_unreachable ();
12704 case ASM_EXPR:
12705 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
12706 break;
12708 case TRY_FINALLY_EXPR:
12709 case TRY_CATCH_EXPR:
12711 gimple_seq eval, cleanup;
12712 gtry *try_;
12714 /* Calls to destructors are generated automatically in FINALLY/CATCH
12715 block. They should have location as UNKNOWN_LOCATION. However,
12716 gimplify_call_expr will reset these call stmts to input_location
12717 if it finds stmt's location is unknown. To prevent resetting for
12718 destructors, we set the input_location to unknown.
12719 Note that this only affects the destructor calls in FINALLY/CATCH
12720 block, and will automatically reset to its original value by the
12721 end of gimplify_expr. */
12722 input_location = UNKNOWN_LOCATION;
12723 eval = cleanup = NULL;
12724 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
12725 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
12726 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
12727 if (gimple_seq_empty_p (cleanup))
12729 gimple_seq_add_seq (pre_p, eval);
12730 ret = GS_ALL_DONE;
12731 break;
12733 try_ = gimple_build_try (eval, cleanup,
12734 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
12735 ? GIMPLE_TRY_FINALLY
12736 : GIMPLE_TRY_CATCH);
12737 if (EXPR_HAS_LOCATION (save_expr))
12738 gimple_set_location (try_, EXPR_LOCATION (save_expr));
12739 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
12740 gimple_set_location (try_, saved_location);
12741 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
12742 gimple_try_set_catch_is_cleanup (try_,
12743 TRY_CATCH_IS_CLEANUP (*expr_p));
12744 gimplify_seq_add_stmt (pre_p, try_);
12745 ret = GS_ALL_DONE;
12746 break;
12749 case CLEANUP_POINT_EXPR:
12750 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
12751 break;
12753 case TARGET_EXPR:
12754 ret = gimplify_target_expr (expr_p, pre_p, post_p);
12755 break;
12757 case CATCH_EXPR:
12759 gimple *c;
12760 gimple_seq handler = NULL;
12761 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
12762 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
12763 gimplify_seq_add_stmt (pre_p, c);
12764 ret = GS_ALL_DONE;
12765 break;
12768 case EH_FILTER_EXPR:
12770 gimple *ehf;
12771 gimple_seq failure = NULL;
12773 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
12774 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
12775 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
12776 gimplify_seq_add_stmt (pre_p, ehf);
12777 ret = GS_ALL_DONE;
12778 break;
12781 case OBJ_TYPE_REF:
12783 enum gimplify_status r0, r1;
12784 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
12785 post_p, is_gimple_val, fb_rvalue);
12786 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
12787 post_p, is_gimple_val, fb_rvalue);
12788 TREE_SIDE_EFFECTS (*expr_p) = 0;
12789 ret = MIN (r0, r1);
12791 break;
12793 case LABEL_DECL:
12794 /* We get here when taking the address of a label. We mark
12795 the label as "forced"; meaning it can never be removed and
12796 it is a potential target for any computed goto. */
12797 FORCED_LABEL (*expr_p) = 1;
12798 ret = GS_ALL_DONE;
12799 break;
12801 case STATEMENT_LIST:
12802 ret = gimplify_statement_list (expr_p, pre_p);
12803 break;
12805 case WITH_SIZE_EXPR:
12807 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12808 post_p == &internal_post ? NULL : post_p,
12809 gimple_test_f, fallback);
12810 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12811 is_gimple_val, fb_rvalue);
12812 ret = GS_ALL_DONE;
12814 break;
12816 case VAR_DECL:
12817 case PARM_DECL:
12818 ret = gimplify_var_or_parm_decl (expr_p);
12819 break;
12821 case RESULT_DECL:
12822 /* When within an OMP context, notice uses of variables. */
12823 if (gimplify_omp_ctxp)
12824 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12825 ret = GS_ALL_DONE;
12826 break;
12828 case DEBUG_EXPR_DECL:
12829 gcc_unreachable ();
12831 case DEBUG_BEGIN_STMT:
12832 gimplify_seq_add_stmt (pre_p,
12833 gimple_build_debug_begin_stmt
12834 (TREE_BLOCK (*expr_p),
12835 EXPR_LOCATION (*expr_p)));
12836 ret = GS_ALL_DONE;
12837 *expr_p = NULL;
12838 break;
12840 case SSA_NAME:
12841 /* Allow callbacks into the gimplifier during optimization. */
12842 ret = GS_ALL_DONE;
12843 break;
12845 case OMP_PARALLEL:
12846 gimplify_omp_parallel (expr_p, pre_p);
12847 ret = GS_ALL_DONE;
12848 break;
12850 case OMP_TASK:
12851 gimplify_omp_task (expr_p, pre_p);
12852 ret = GS_ALL_DONE;
12853 break;
12855 case OMP_FOR:
12856 case OMP_SIMD:
12857 case OMP_DISTRIBUTE:
12858 case OMP_TASKLOOP:
12859 case OACC_LOOP:
12860 ret = gimplify_omp_for (expr_p, pre_p);
12861 break;
12863 case OACC_CACHE:
12864 gimplify_oacc_cache (expr_p, pre_p);
12865 ret = GS_ALL_DONE;
12866 break;
12868 case OACC_DECLARE:
12869 gimplify_oacc_declare (expr_p, pre_p);
12870 ret = GS_ALL_DONE;
12871 break;
12873 case OACC_HOST_DATA:
12874 case OACC_DATA:
12875 case OACC_KERNELS:
12876 case OACC_PARALLEL:
12877 case OMP_SECTIONS:
12878 case OMP_SINGLE:
12879 case OMP_TARGET:
12880 case OMP_TARGET_DATA:
12881 case OMP_TEAMS:
12882 gimplify_omp_workshare (expr_p, pre_p);
12883 ret = GS_ALL_DONE;
12884 break;
12886 case OACC_ENTER_DATA:
12887 case OACC_EXIT_DATA:
12888 case OACC_UPDATE:
12889 case OMP_TARGET_UPDATE:
12890 case OMP_TARGET_ENTER_DATA:
12891 case OMP_TARGET_EXIT_DATA:
12892 gimplify_omp_target_update (expr_p, pre_p);
12893 ret = GS_ALL_DONE;
12894 break;
12896 case OMP_SECTION:
12897 case OMP_MASTER:
12898 case OMP_ORDERED:
12899 case OMP_CRITICAL:
12901 gimple_seq body = NULL;
12902 gimple *g;
12904 gimplify_and_add (OMP_BODY (*expr_p), &body);
12905 switch (TREE_CODE (*expr_p))
12907 case OMP_SECTION:
12908 g = gimple_build_omp_section (body);
12909 break;
12910 case OMP_MASTER:
12911 g = gimple_build_omp_master (body);
12912 break;
12913 case OMP_ORDERED:
12914 g = gimplify_omp_ordered (*expr_p, body);
12915 break;
12916 case OMP_CRITICAL:
12917 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
12918 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
12919 gimplify_adjust_omp_clauses (pre_p, body,
12920 &OMP_CRITICAL_CLAUSES (*expr_p),
12921 OMP_CRITICAL);
12922 g = gimple_build_omp_critical (body,
12923 OMP_CRITICAL_NAME (*expr_p),
12924 OMP_CRITICAL_CLAUSES (*expr_p));
12925 break;
12926 default:
12927 gcc_unreachable ();
12929 gimplify_seq_add_stmt (pre_p, g);
12930 ret = GS_ALL_DONE;
12931 break;
12934 case OMP_TASKGROUP:
12936 gimple_seq body = NULL;
12938 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
12939 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
12940 OMP_TASKGROUP);
12941 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
12942 gimplify_and_add (OMP_BODY (*expr_p), &body);
12943 gimple_seq cleanup = NULL;
12944 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
12945 gimple *g = gimple_build_call (fn, 0);
12946 gimple_seq_add_stmt (&cleanup, g);
12947 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12948 body = NULL;
12949 gimple_seq_add_stmt (&body, g);
12950 g = gimple_build_omp_taskgroup (body, *pclauses);
12951 gimplify_seq_add_stmt (pre_p, g);
12952 ret = GS_ALL_DONE;
12953 break;
12956 case OMP_ATOMIC:
12957 case OMP_ATOMIC_READ:
12958 case OMP_ATOMIC_CAPTURE_OLD:
12959 case OMP_ATOMIC_CAPTURE_NEW:
12960 ret = gimplify_omp_atomic (expr_p, pre_p);
12961 break;
12963 case TRANSACTION_EXPR:
12964 ret = gimplify_transaction (expr_p, pre_p);
12965 break;
12967 case TRUTH_AND_EXPR:
12968 case TRUTH_OR_EXPR:
12969 case TRUTH_XOR_EXPR:
12971 tree orig_type = TREE_TYPE (*expr_p);
12972 tree new_type, xop0, xop1;
12973 *expr_p = gimple_boolify (*expr_p);
12974 new_type = TREE_TYPE (*expr_p);
12975 if (!useless_type_conversion_p (orig_type, new_type))
12977 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
12978 ret = GS_OK;
12979 break;
12982 /* Boolified binary truth expressions are semantically equivalent
12983 to bitwise binary expressions. Canonicalize them to the
12984 bitwise variant. */
12985 switch (TREE_CODE (*expr_p))
12987 case TRUTH_AND_EXPR:
12988 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
12989 break;
12990 case TRUTH_OR_EXPR:
12991 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
12992 break;
12993 case TRUTH_XOR_EXPR:
12994 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
12995 break;
12996 default:
12997 break;
12999 /* Now make sure that operands have compatible type to
13000 expression's new_type. */
13001 xop0 = TREE_OPERAND (*expr_p, 0);
13002 xop1 = TREE_OPERAND (*expr_p, 1);
13003 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
13004 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
13005 new_type,
13006 xop0);
13007 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
13008 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
13009 new_type,
13010 xop1);
13011 /* Continue classified as tcc_binary. */
13012 goto expr_2;
13015 case VEC_COND_EXPR:
13017 enum gimplify_status r0, r1, r2;
13019 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13020 post_p, is_gimple_condexpr, fb_rvalue);
13021 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13022 post_p, is_gimple_val, fb_rvalue);
13023 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13024 post_p, is_gimple_val, fb_rvalue);
13026 ret = MIN (MIN (r0, r1), r2);
13027 recalculate_side_effects (*expr_p);
13029 break;
13031 case VEC_PERM_EXPR:
13032 /* Classified as tcc_expression. */
13033 goto expr_3;
13035 case BIT_INSERT_EXPR:
13036 /* Argument 3 is a constant. */
13037 goto expr_2;
13039 case POINTER_PLUS_EXPR:
13041 enum gimplify_status r0, r1;
13042 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13043 post_p, is_gimple_val, fb_rvalue);
13044 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13045 post_p, is_gimple_val, fb_rvalue);
13046 recalculate_side_effects (*expr_p);
13047 ret = MIN (r0, r1);
13048 break;
13051 default:
13052 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
13054 case tcc_comparison:
13055 /* Handle comparison of objects of non scalar mode aggregates
13056 with a call to memcmp. It would be nice to only have to do
13057 this for variable-sized objects, but then we'd have to allow
13058 the same nest of reference nodes we allow for MODIFY_EXPR and
13059 that's too complex.
13061 Compare scalar mode aggregates as scalar mode values. Using
13062 memcmp for them would be very inefficient at best, and is
13063 plain wrong if bitfields are involved. */
13065 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
13067 /* Vector comparisons need no boolification. */
13068 if (TREE_CODE (type) == VECTOR_TYPE)
13069 goto expr_2;
13070 else if (!AGGREGATE_TYPE_P (type))
13072 tree org_type = TREE_TYPE (*expr_p);
13073 *expr_p = gimple_boolify (*expr_p);
13074 if (!useless_type_conversion_p (org_type,
13075 TREE_TYPE (*expr_p)))
13077 *expr_p = fold_convert_loc (input_location,
13078 org_type, *expr_p);
13079 ret = GS_OK;
13081 else
13082 goto expr_2;
13084 else if (TYPE_MODE (type) != BLKmode)
13085 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13086 else
13087 ret = gimplify_variable_sized_compare (expr_p);
13089 break;
13092 /* If *EXPR_P does not need to be special-cased, handle it
13093 according to its class. */
13094 case tcc_unary:
13095 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13096 post_p, is_gimple_val, fb_rvalue);
13097 break;
13099 case tcc_binary:
13100 expr_2:
13102 enum gimplify_status r0, r1;
13104 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13105 post_p, is_gimple_val, fb_rvalue);
13106 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13107 post_p, is_gimple_val, fb_rvalue);
13109 ret = MIN (r0, r1);
13110 break;
13113 expr_3:
13115 enum gimplify_status r0, r1, r2;
13117 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13118 post_p, is_gimple_val, fb_rvalue);
13119 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13120 post_p, is_gimple_val, fb_rvalue);
13121 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13122 post_p, is_gimple_val, fb_rvalue);
13124 ret = MIN (MIN (r0, r1), r2);
13125 break;
13128 case tcc_declaration:
13129 case tcc_constant:
13130 ret = GS_ALL_DONE;
13131 goto dont_recalculate;
13133 default:
13134 gcc_unreachable ();
13137 recalculate_side_effects (*expr_p);
13139 dont_recalculate:
13140 break;
13143 gcc_assert (*expr_p || ret != GS_OK);
13145 while (ret == GS_OK);
13147 /* If we encountered an error_mark somewhere nested inside, either
13148 stub out the statement or propagate the error back out. */
13149 if (ret == GS_ERROR)
13151 if (is_statement)
13152 *expr_p = NULL;
13153 goto out;
13156 /* This was only valid as a return value from the langhook, which
13157 we handled. Make sure it doesn't escape from any other context. */
13158 gcc_assert (ret != GS_UNHANDLED);
13160 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
13162 /* We aren't looking for a value, and we don't have a valid
13163 statement. If it doesn't have side-effects, throw it away.
13164 We can also get here with code such as "*&&L;", where L is
13165 a LABEL_DECL that is marked as FORCED_LABEL. */
13166 if (TREE_CODE (*expr_p) == LABEL_DECL
13167 || !TREE_SIDE_EFFECTS (*expr_p))
13168 *expr_p = NULL;
13169 else if (!TREE_THIS_VOLATILE (*expr_p))
13171 /* This is probably a _REF that contains something nested that
13172 has side effects. Recurse through the operands to find it. */
13173 enum tree_code code = TREE_CODE (*expr_p);
13175 switch (code)
13177 case COMPONENT_REF:
13178 case REALPART_EXPR:
13179 case IMAGPART_EXPR:
13180 case VIEW_CONVERT_EXPR:
13181 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13182 gimple_test_f, fallback);
13183 break;
13185 case ARRAY_REF:
13186 case ARRAY_RANGE_REF:
13187 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13188 gimple_test_f, fallback);
13189 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13190 gimple_test_f, fallback);
13191 break;
13193 default:
13194 /* Anything else with side-effects must be converted to
13195 a valid statement before we get here. */
13196 gcc_unreachable ();
13199 *expr_p = NULL;
13201 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13202 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
13204 /* Historically, the compiler has treated a bare reference
13205 to a non-BLKmode volatile lvalue as forcing a load. */
13206 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
13208 /* Normally, we do not want to create a temporary for a
13209 TREE_ADDRESSABLE type because such a type should not be
13210 copied by bitwise-assignment. However, we make an
13211 exception here, as all we are doing here is ensuring that
13212 we read the bytes that make up the type. We use
13213 create_tmp_var_raw because create_tmp_var will abort when
13214 given a TREE_ADDRESSABLE type. */
13215 tree tmp = create_tmp_var_raw (type, "vol");
13216 gimple_add_tmp_var (tmp);
13217 gimplify_assign (tmp, *expr_p, pre_p);
13218 *expr_p = NULL;
13220 else
13221 /* We can't do anything useful with a volatile reference to
13222 an incomplete type, so just throw it away. Likewise for
13223 a BLKmode type, since any implicit inner load should
13224 already have been turned into an explicit one by the
13225 gimplification process. */
13226 *expr_p = NULL;
13229 /* If we are gimplifying at the statement level, we're done. Tack
13230 everything together and return. */
13231 if (fallback == fb_none || is_statement)
13233 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13234 it out for GC to reclaim it. */
13235 *expr_p = NULL_TREE;
13237 if (!gimple_seq_empty_p (internal_pre)
13238 || !gimple_seq_empty_p (internal_post))
13240 gimplify_seq_add_seq (&internal_pre, internal_post);
13241 gimplify_seq_add_seq (pre_p, internal_pre);
13244 /* The result of gimplifying *EXPR_P is going to be the last few
13245 statements in *PRE_P and *POST_P. Add location information
13246 to all the statements that were added by the gimplification
13247 helpers. */
13248 if (!gimple_seq_empty_p (*pre_p))
13249 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
13251 if (!gimple_seq_empty_p (*post_p))
13252 annotate_all_with_location_after (*post_p, post_last_gsi,
13253 input_location);
13255 goto out;
13258 #ifdef ENABLE_GIMPLE_CHECKING
13259 if (*expr_p)
13261 enum tree_code code = TREE_CODE (*expr_p);
13262 /* These expressions should already be in gimple IR form. */
13263 gcc_assert (code != MODIFY_EXPR
13264 && code != ASM_EXPR
13265 && code != BIND_EXPR
13266 && code != CATCH_EXPR
13267 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
13268 && code != EH_FILTER_EXPR
13269 && code != GOTO_EXPR
13270 && code != LABEL_EXPR
13271 && code != LOOP_EXPR
13272 && code != SWITCH_EXPR
13273 && code != TRY_FINALLY_EXPR
13274 && code != OACC_PARALLEL
13275 && code != OACC_KERNELS
13276 && code != OACC_DATA
13277 && code != OACC_HOST_DATA
13278 && code != OACC_DECLARE
13279 && code != OACC_UPDATE
13280 && code != OACC_ENTER_DATA
13281 && code != OACC_EXIT_DATA
13282 && code != OACC_CACHE
13283 && code != OMP_CRITICAL
13284 && code != OMP_FOR
13285 && code != OACC_LOOP
13286 && code != OMP_MASTER
13287 && code != OMP_TASKGROUP
13288 && code != OMP_ORDERED
13289 && code != OMP_PARALLEL
13290 && code != OMP_SECTIONS
13291 && code != OMP_SECTION
13292 && code != OMP_SINGLE);
13294 #endif
13296 /* Otherwise we're gimplifying a subexpression, so the resulting
13297 value is interesting. If it's a valid operand that matches
13298 GIMPLE_TEST_F, we're done. Unless we are handling some
13299 post-effects internally; if that's the case, we need to copy into
13300 a temporary before adding the post-effects to POST_P. */
13301 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
13302 goto out;
13304 /* Otherwise, we need to create a new temporary for the gimplified
13305 expression. */
13307 /* We can't return an lvalue if we have an internal postqueue. The
13308 object the lvalue refers to would (probably) be modified by the
13309 postqueue; we need to copy the value out first, which means an
13310 rvalue. */
13311 if ((fallback & fb_lvalue)
13312 && gimple_seq_empty_p (internal_post)
13313 && is_gimple_addressable (*expr_p))
13315 /* An lvalue will do. Take the address of the expression, store it
13316 in a temporary, and replace the expression with an INDIRECT_REF of
13317 that temporary. */
13318 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
13319 unsigned int ref_align = get_object_alignment (*expr_p);
13320 tree ref_type = TREE_TYPE (*expr_p);
13321 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
13322 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
13323 if (TYPE_ALIGN (ref_type) != ref_align)
13324 ref_type = build_aligned_type (ref_type, ref_align);
13325 *expr_p = build2 (MEM_REF, ref_type,
13326 tmp, build_zero_cst (ref_alias_type));
13328 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
13330 /* An rvalue will do. Assign the gimplified expression into a
13331 new temporary TMP and replace the original expression with
13332 TMP. First, make sure that the expression has a type so that
13333 it can be assigned into a temporary. */
13334 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
13335 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
13337 else
13339 #ifdef ENABLE_GIMPLE_CHECKING
13340 if (!(fallback & fb_mayfail))
13342 fprintf (stderr, "gimplification failed:\n");
13343 print_generic_expr (stderr, *expr_p);
13344 debug_tree (*expr_p);
13345 internal_error ("gimplification failed");
13347 #endif
13348 gcc_assert (fallback & fb_mayfail);
13350 /* If this is an asm statement, and the user asked for the
13351 impossible, don't die. Fail and let gimplify_asm_expr
13352 issue an error. */
13353 ret = GS_ERROR;
13354 goto out;
13357 /* Make sure the temporary matches our predicate. */
13358 gcc_assert ((*gimple_test_f) (*expr_p));
13360 if (!gimple_seq_empty_p (internal_post))
13362 annotate_all_with_location (internal_post, input_location);
13363 gimplify_seq_add_seq (pre_p, internal_post);
13366 out:
13367 input_location = saved_location;
13368 return ret;
13371 /* Like gimplify_expr but make sure the gimplified result is not itself
13372 a SSA name (but a decl if it were). Temporaries required by
13373 evaluating *EXPR_P may be still SSA names. */
13375 static enum gimplify_status
13376 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13377 bool (*gimple_test_f) (tree), fallback_t fallback,
13378 bool allow_ssa)
13380 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13381 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13382 gimple_test_f, fallback);
13383 if (! allow_ssa
13384 && TREE_CODE (*expr_p) == SSA_NAME)
13386 tree name = *expr_p;
13387 if (was_ssa_name_p)
13388 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13389 else
13391 /* Avoid the extra copy if possible. */
13392 *expr_p = create_tmp_reg (TREE_TYPE (name));
13393 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13394 release_ssa_name (name);
13397 return ret;
13400 /* Look through TYPE for variable-sized objects and gimplify each such
13401 size that we find. Add to LIST_P any statements generated. */
13403 void
13404 gimplify_type_sizes (tree type, gimple_seq *list_p)
13406 tree field, t;
13408 if (type == NULL || type == error_mark_node)
13409 return;
13411 /* We first do the main variant, then copy into any other variants. */
13412 type = TYPE_MAIN_VARIANT (type);
13414 /* Avoid infinite recursion. */
13415 if (TYPE_SIZES_GIMPLIFIED (type))
13416 return;
13418 TYPE_SIZES_GIMPLIFIED (type) = 1;
13420 switch (TREE_CODE (type))
13422 case INTEGER_TYPE:
13423 case ENUMERAL_TYPE:
13424 case BOOLEAN_TYPE:
13425 case REAL_TYPE:
13426 case FIXED_POINT_TYPE:
13427 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
13428 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
13430 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13432 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
13433 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
13435 break;
13437 case ARRAY_TYPE:
13438 /* These types may not have declarations, so handle them here. */
13439 gimplify_type_sizes (TREE_TYPE (type), list_p);
13440 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
13441 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
13442 with assigned stack slots, for -O1+ -g they should be tracked
13443 by VTA. */
13444 if (!(TYPE_NAME (type)
13445 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13446 && DECL_IGNORED_P (TYPE_NAME (type)))
13447 && TYPE_DOMAIN (type)
13448 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
13450 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
13451 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13452 DECL_IGNORED_P (t) = 0;
13453 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
13454 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
13455 DECL_IGNORED_P (t) = 0;
13457 break;
13459 case RECORD_TYPE:
13460 case UNION_TYPE:
13461 case QUAL_UNION_TYPE:
13462 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
13463 if (TREE_CODE (field) == FIELD_DECL)
13465 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
13466 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
13467 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
13468 gimplify_type_sizes (TREE_TYPE (field), list_p);
13470 break;
13472 case POINTER_TYPE:
13473 case REFERENCE_TYPE:
13474 /* We used to recurse on the pointed-to type here, which turned out to
13475 be incorrect because its definition might refer to variables not
13476 yet initialized at this point if a forward declaration is involved.
13478 It was actually useful for anonymous pointed-to types to ensure
13479 that the sizes evaluation dominates every possible later use of the
13480 values. Restricting to such types here would be safe since there
13481 is no possible forward declaration around, but would introduce an
13482 undesirable middle-end semantic to anonymity. We then defer to
13483 front-ends the responsibility of ensuring that the sizes are
13484 evaluated both early and late enough, e.g. by attaching artificial
13485 type declarations to the tree. */
13486 break;
13488 default:
13489 break;
13492 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
13493 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
13495 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13497 TYPE_SIZE (t) = TYPE_SIZE (type);
13498 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
13499 TYPE_SIZES_GIMPLIFIED (t) = 1;
13503 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13504 a size or position, has had all of its SAVE_EXPRs evaluated.
13505 We add any required statements to *STMT_P. */
13507 void
13508 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
13510 tree expr = *expr_p;
13512 /* We don't do anything if the value isn't there, is constant, or contains
13513 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
13514 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
13515 will want to replace it with a new variable, but that will cause problems
13516 if this type is from outside the function. It's OK to have that here. */
13517 if (expr == NULL_TREE
13518 || is_gimple_constant (expr)
13519 || TREE_CODE (expr) == VAR_DECL
13520 || CONTAINS_PLACEHOLDER_P (expr))
13521 return;
13523 *expr_p = unshare_expr (expr);
13525 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13526 if the def vanishes. */
13527 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
13529 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
13530 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
13531 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
13532 if (is_gimple_constant (*expr_p))
13533 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
13536 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
13537 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
13538 is true, also gimplify the parameters. */
13540 gbind *
13541 gimplify_body (tree fndecl, bool do_parms)
13543 location_t saved_location = input_location;
13544 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
13545 gimple *outer_stmt;
13546 gbind *outer_bind;
13548 timevar_push (TV_TREE_GIMPLIFY);
13550 init_tree_ssa (cfun);
13552 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
13553 gimplification. */
13554 default_rtl_profile ();
13556 gcc_assert (gimplify_ctxp == NULL);
13557 push_gimplify_context (true);
13559 if (flag_openacc || flag_openmp)
13561 gcc_assert (gimplify_omp_ctxp == NULL);
13562 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
13563 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
13566 /* Unshare most shared trees in the body and in that of any nested functions.
13567 It would seem we don't have to do this for nested functions because
13568 they are supposed to be output and then the outer function gimplified
13569 first, but the g++ front end doesn't always do it that way. */
13570 unshare_body (fndecl);
13571 unvisit_body (fndecl);
13573 /* Make sure input_location isn't set to something weird. */
13574 input_location = DECL_SOURCE_LOCATION (fndecl);
13576 /* Resolve callee-copies. This has to be done before processing
13577 the body so that DECL_VALUE_EXPR gets processed correctly. */
13578 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
13580 /* Gimplify the function's body. */
13581 seq = NULL;
13582 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
13583 outer_stmt = gimple_seq_first_stmt (seq);
13584 if (!outer_stmt)
13586 outer_stmt = gimple_build_nop ();
13587 gimplify_seq_add_stmt (&seq, outer_stmt);
13590 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
13591 not the case, wrap everything in a GIMPLE_BIND to make it so. */
13592 if (gimple_code (outer_stmt) == GIMPLE_BIND
13593 && gimple_seq_first (seq) == gimple_seq_last (seq))
13594 outer_bind = as_a <gbind *> (outer_stmt);
13595 else
13596 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
13598 DECL_SAVED_TREE (fndecl) = NULL_TREE;
13600 /* If we had callee-copies statements, insert them at the beginning
13601 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
13602 if (!gimple_seq_empty_p (parm_stmts))
13604 tree parm;
13606 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
13607 if (parm_cleanup)
13609 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
13610 GIMPLE_TRY_FINALLY);
13611 parm_stmts = NULL;
13612 gimple_seq_add_stmt (&parm_stmts, g);
13614 gimple_bind_set_body (outer_bind, parm_stmts);
13616 for (parm = DECL_ARGUMENTS (current_function_decl);
13617 parm; parm = DECL_CHAIN (parm))
13618 if (DECL_HAS_VALUE_EXPR_P (parm))
13620 DECL_HAS_VALUE_EXPR_P (parm) = 0;
13621 DECL_IGNORED_P (parm) = 0;
13625 if ((flag_openacc || flag_openmp || flag_openmp_simd)
13626 && gimplify_omp_ctxp)
13628 delete_omp_context (gimplify_omp_ctxp);
13629 gimplify_omp_ctxp = NULL;
13632 pop_gimplify_context (outer_bind);
13633 gcc_assert (gimplify_ctxp == NULL);
13635 if (flag_checking && !seen_error ())
13636 verify_gimple_in_seq (gimple_bind_body (outer_bind));
13638 timevar_pop (TV_TREE_GIMPLIFY);
13639 input_location = saved_location;
13641 return outer_bind;
13644 typedef char *char_p; /* For DEF_VEC_P. */
13646 /* Return whether we should exclude FNDECL from instrumentation. */
13648 static bool
13649 flag_instrument_functions_exclude_p (tree fndecl)
13651 vec<char_p> *v;
13653 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
13654 if (v && v->length () > 0)
13656 const char *name;
13657 int i;
13658 char *s;
13660 name = lang_hooks.decl_printable_name (fndecl, 0);
13661 FOR_EACH_VEC_ELT (*v, i, s)
13662 if (strstr (name, s) != NULL)
13663 return true;
13666 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
13667 if (v && v->length () > 0)
13669 const char *name;
13670 int i;
13671 char *s;
13673 name = DECL_SOURCE_FILE (fndecl);
13674 FOR_EACH_VEC_ELT (*v, i, s)
13675 if (strstr (name, s) != NULL)
13676 return true;
13679 return false;
13682 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
13683 node for the function we want to gimplify.
13685 Return the sequence of GIMPLE statements corresponding to the body
13686 of FNDECL. */
13688 void
13689 gimplify_function_tree (tree fndecl)
13691 tree parm, ret;
13692 gimple_seq seq;
13693 gbind *bind;
13695 gcc_assert (!gimple_body (fndecl));
13697 if (DECL_STRUCT_FUNCTION (fndecl))
13698 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
13699 else
13700 push_struct_function (fndecl);
13702 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
13703 if necessary. */
13704 cfun->curr_properties |= PROP_gimple_lva;
13706 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
13708 /* Preliminarily mark non-addressed complex variables as eligible
13709 for promotion to gimple registers. We'll transform their uses
13710 as we find them. */
13711 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
13712 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
13713 && !TREE_THIS_VOLATILE (parm)
13714 && !needs_to_live_in_memory (parm))
13715 DECL_GIMPLE_REG_P (parm) = 1;
13718 ret = DECL_RESULT (fndecl);
13719 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
13720 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
13721 && !needs_to_live_in_memory (ret))
13722 DECL_GIMPLE_REG_P (ret) = 1;
13724 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
13725 asan_poisoned_variables = new hash_set<tree> ();
13726 bind = gimplify_body (fndecl, true);
13727 if (asan_poisoned_variables)
13729 delete asan_poisoned_variables;
13730 asan_poisoned_variables = NULL;
13733 /* The tree body of the function is no longer needed, replace it
13734 with the new GIMPLE body. */
13735 seq = NULL;
13736 gimple_seq_add_stmt (&seq, bind);
13737 gimple_set_body (fndecl, seq);
13739 /* If we're instrumenting function entry/exit, then prepend the call to
13740 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
13741 catch the exit hook. */
13742 /* ??? Add some way to ignore exceptions for this TFE. */
13743 if (flag_instrument_function_entry_exit
13744 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
13745 /* Do not instrument extern inline functions. */
13746 && !(DECL_DECLARED_INLINE_P (fndecl)
13747 && DECL_EXTERNAL (fndecl)
13748 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
13749 && !flag_instrument_functions_exclude_p (fndecl))
13751 tree x;
13752 gbind *new_bind;
13753 gimple *tf;
13754 gimple_seq cleanup = NULL, body = NULL;
13755 tree tmp_var, this_fn_addr;
13756 gcall *call;
13758 /* The instrumentation hooks aren't going to call the instrumented
13759 function and the address they receive is expected to be matchable
13760 against symbol addresses. Make sure we don't create a trampoline,
13761 in case the current function is nested. */
13762 this_fn_addr = build_fold_addr_expr (current_function_decl);
13763 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
13765 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
13766 call = gimple_build_call (x, 1, integer_zero_node);
13767 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13768 gimple_call_set_lhs (call, tmp_var);
13769 gimplify_seq_add_stmt (&cleanup, call);
13770 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
13771 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
13772 gimplify_seq_add_stmt (&cleanup, call);
13773 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
13775 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
13776 call = gimple_build_call (x, 1, integer_zero_node);
13777 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13778 gimple_call_set_lhs (call, tmp_var);
13779 gimplify_seq_add_stmt (&body, call);
13780 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
13781 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
13782 gimplify_seq_add_stmt (&body, call);
13783 gimplify_seq_add_stmt (&body, tf);
13784 new_bind = gimple_build_bind (NULL, body, NULL);
13786 /* Replace the current function body with the body
13787 wrapped in the try/finally TF. */
13788 seq = NULL;
13789 gimple_seq_add_stmt (&seq, new_bind);
13790 gimple_set_body (fndecl, seq);
13791 bind = new_bind;
13794 if (sanitize_flags_p (SANITIZE_THREAD))
13796 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
13797 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
13798 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
13799 /* Replace the current function body with the body
13800 wrapped in the try/finally TF. */
13801 seq = NULL;
13802 gimple_seq_add_stmt (&seq, new_bind);
13803 gimple_set_body (fndecl, seq);
13806 DECL_SAVED_TREE (fndecl) = NULL_TREE;
13807 cfun->curr_properties |= PROP_gimple_any;
13809 pop_cfun ();
13811 dump_function (TDI_gimple, fndecl);
13814 /* Return a dummy expression of type TYPE in order to keep going after an
13815 error. */
13817 static tree
13818 dummy_object (tree type)
13820 tree t = build_int_cst (build_pointer_type (type), 0);
13821 return build2 (MEM_REF, type, t, t);
13824 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
13825 builtin function, but a very special sort of operator. */
13827 enum gimplify_status
13828 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
13829 gimple_seq *post_p ATTRIBUTE_UNUSED)
13831 tree promoted_type, have_va_type;
13832 tree valist = TREE_OPERAND (*expr_p, 0);
13833 tree type = TREE_TYPE (*expr_p);
13834 tree t, tag, aptag;
13835 location_t loc = EXPR_LOCATION (*expr_p);
13837 /* Verify that valist is of the proper type. */
13838 have_va_type = TREE_TYPE (valist);
13839 if (have_va_type == error_mark_node)
13840 return GS_ERROR;
13841 have_va_type = targetm.canonical_va_list_type (have_va_type);
13842 if (have_va_type == NULL_TREE
13843 && POINTER_TYPE_P (TREE_TYPE (valist)))
13844 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
13845 have_va_type
13846 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
13847 gcc_assert (have_va_type != NULL_TREE);
13849 /* Generate a diagnostic for requesting data of a type that cannot
13850 be passed through `...' due to type promotion at the call site. */
13851 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
13852 != type)
13854 static bool gave_help;
13855 bool warned;
13856 /* Use the expansion point to handle cases such as passing bool (defined
13857 in a system header) through `...'. */
13858 location_t xloc
13859 = expansion_point_location_if_in_system_header (loc);
13861 /* Unfortunately, this is merely undefined, rather than a constraint
13862 violation, so we cannot make this an error. If this call is never
13863 executed, the program is still strictly conforming. */
13864 auto_diagnostic_group d;
13865 warned = warning_at (xloc, 0,
13866 "%qT is promoted to %qT when passed through %<...%>",
13867 type, promoted_type);
13868 if (!gave_help && warned)
13870 gave_help = true;
13871 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
13872 promoted_type, type);
13875 /* We can, however, treat "undefined" any way we please.
13876 Call abort to encourage the user to fix the program. */
13877 if (warned)
13878 inform (xloc, "if this code is reached, the program will abort");
13879 /* Before the abort, allow the evaluation of the va_list
13880 expression to exit or longjmp. */
13881 gimplify_and_add (valist, pre_p);
13882 t = build_call_expr_loc (loc,
13883 builtin_decl_implicit (BUILT_IN_TRAP), 0);
13884 gimplify_and_add (t, pre_p);
13886 /* This is dead code, but go ahead and finish so that the
13887 mode of the result comes out right. */
13888 *expr_p = dummy_object (type);
13889 return GS_ALL_DONE;
13892 tag = build_int_cst (build_pointer_type (type), 0);
13893 aptag = build_int_cst (TREE_TYPE (valist), 0);
13895 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
13896 valist, tag, aptag);
13898 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
13899 needs to be expanded. */
13900 cfun->curr_properties &= ~PROP_gimple_lva;
13902 return GS_OK;
13905 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
13907 DST/SRC are the destination and source respectively. You can pass
13908 ungimplified trees in DST or SRC, in which case they will be
13909 converted to a gimple operand if necessary.
13911 This function returns the newly created GIMPLE_ASSIGN tuple. */
13913 gimple *
13914 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
13916 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
13917 gimplify_and_add (t, seq_p);
13918 ggc_free (t);
13919 return gimple_seq_last_stmt (*seq_p);
13922 inline hashval_t
13923 gimplify_hasher::hash (const elt_t *p)
13925 tree t = p->val;
13926 return iterative_hash_expr (t, 0);
13929 inline bool
13930 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
13932 tree t1 = p1->val;
13933 tree t2 = p2->val;
13934 enum tree_code code = TREE_CODE (t1);
13936 if (TREE_CODE (t2) != code
13937 || TREE_TYPE (t1) != TREE_TYPE (t2))
13938 return false;
13940 if (!operand_equal_p (t1, t2, 0))
13941 return false;
13943 /* Only allow them to compare equal if they also hash equal; otherwise
13944 results are nondeterminate, and we fail bootstrap comparison. */
13945 gcc_checking_assert (hash (p1) == hash (p2));
13947 return true;