PR rtl-optimization/87817
[official-gcc.git] / gcc / gimplify.c
blobad7f824e0fa2a70bd86016f1c96b756c44d7d392
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2018 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
69 /* Hash set of poisoned variables in a bind expr. */
70 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Data-sharing attributes and flags tracked per variable while
   gimplifying OpenMP/OpenACC regions.  The low bits are mutually
   exclusive sharing classes; the high bits are modifier flags.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 1048576,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 2097152,

  GOVD_NONTEMPORAL = 4194304,

  /* Mask of the mutually-exclusive data-sharing classes.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kinds of OpenMP/OpenACC regions the gimplifier can be inside of.
   Low bits distinguish combined/untied variants of a base region.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
160 /* Gimplify hashtable helper. */
162 struct gimplify_hasher : free_ptr_hash <elt_t>
164 static inline hashval_t hash (const elt_t *);
165 static inline bool equal (const elt_t *, const elt_t *);
168 struct gimplify_ctx
170 struct gimplify_ctx *prev_context;
172 vec<gbind *> bind_expr_stack;
173 tree temps;
174 gimple_seq conditional_cleanups;
175 tree exit_label;
176 tree return_temp;
178 vec<tree> case_labels;
179 hash_set<tree> *live_switch_vars;
180 /* The formal temporary table. Should this be persistent? */
181 hash_table<gimplify_hasher> *temp_htab;
183 int conditions;
184 unsigned into_ssa : 1;
185 unsigned allow_rhs_cond_expr : 1;
186 unsigned in_cleanup_point_expr : 1;
187 unsigned keep_stack : 1;
188 unsigned save_stack : 1;
189 unsigned in_switch_expr : 1;
/* Index into gimplify_omp_ctx::defaultmap, classifying a variable for
   the purpose of OpenMP defaultmap clause handling.  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
200 struct gimplify_omp_ctx
202 struct gimplify_omp_ctx *outer_context;
203 splay_tree variables;
204 hash_set<tree> *privatized_types;
205 /* Iteration variables in an OMP_FOR. */
206 vec<tree> loop_iter_var;
207 location_t location;
208 enum omp_clause_default_kind default_kind;
209 enum omp_region_type region_type;
210 bool combined_loop;
211 bool distribute;
212 bool target_firstprivatize_array_bases;
213 int defaultmap[4];
216 static struct gimplify_ctx *gimplify_ctxp;
217 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
219 /* Forward declaration. */
220 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
221 static hash_map<tree, tree> *oacc_declare_returns;
222 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
223 bool (*) (tree), fallback_t, bool);
225 /* Shorter alias name for the above function for use in gimplify.c
226 only. */
228 static inline void
229 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
231 gimple_seq_add_stmt_without_update (seq_p, gs);
234 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
235 NULL, a new sequence is allocated. This function is
236 similar to gimple_seq_add_seq, but does not scan the operands.
237 During gimplification, we need to manipulate statement sequences
238 before the def/use vectors have been constructed. */
240 static void
241 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
243 gimple_stmt_iterator si;
245 if (src == NULL)
246 return;
248 si = gsi_last (*dst_p);
249 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
253 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
254 and popping gimplify contexts. */
256 static struct gimplify_ctx *ctx_pool = NULL;
258 /* Return a gimplify context struct from the pool. */
260 static inline struct gimplify_ctx *
261 ctx_alloc (void)
263 struct gimplify_ctx * c = ctx_pool;
265 if (c)
266 ctx_pool = c->prev_context;
267 else
268 c = XNEW (struct gimplify_ctx);
270 memset (c, '\0', sizeof (*c));
271 return c;
274 /* Put gimplify context C back into the pool. */
276 static inline void
277 ctx_free (struct gimplify_ctx *c)
279 c->prev_context = ctx_pool;
280 ctx_pool = c;
283 /* Free allocated ctx stack memory. */
285 void
286 free_gimplify_stack (void)
288 struct gimplify_ctx *c;
290 while ((c = ctx_pool))
292 ctx_pool = c->prev_context;
293 free (c);
298 /* Set up a context for the gimplifier. */
300 void
301 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
303 struct gimplify_ctx *c = ctx_alloc ();
305 c->prev_context = gimplify_ctxp;
306 gimplify_ctxp = c;
307 gimplify_ctxp->into_ssa = in_ssa;
308 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
311 /* Tear down a context for the gimplifier. If BODY is non-null, then
312 put the temporaries into the outer BIND_EXPR. Otherwise, put them
313 in the local_decls.
315 BODY is not a sequence, but the first tuple in a sequence. */
317 void
318 pop_gimplify_context (gimple *body)
320 struct gimplify_ctx *c = gimplify_ctxp;
322 gcc_assert (c
323 && (!c->bind_expr_stack.exists ()
324 || c->bind_expr_stack.is_empty ()));
325 c->bind_expr_stack.release ();
326 gimplify_ctxp = c->prev_context;
328 if (body)
329 declare_vars (c->temps, body, false);
330 else
331 record_vars (c->temps);
333 delete c->temp_htab;
334 c->temp_htab = NULL;
335 ctx_free (c);
338 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
340 static void
341 gimple_push_bind_expr (gbind *bind_stmt)
343 gimplify_ctxp->bind_expr_stack.reserve (8);
344 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
347 /* Pop the first element off the stack of bindings. */
349 static void
350 gimple_pop_bind_expr (void)
352 gimplify_ctxp->bind_expr_stack.pop ();
355 /* Return the first element of the stack of bindings. */
357 gbind *
358 gimple_current_bind_expr (void)
360 return gimplify_ctxp->bind_expr_stack.last ();
363 /* Return the stack of bindings created during gimplification. */
365 vec<gbind *>
366 gimple_bind_expr_stack (void)
368 return gimplify_ctxp->bind_expr_stack;
371 /* Return true iff there is a COND_EXPR between us and the innermost
372 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
374 static bool
375 gimple_conditional_context (void)
377 return gimplify_ctxp->conditions > 0;
380 /* Note that we've entered a COND_EXPR. */
382 static void
383 gimple_push_condition (void)
385 #ifdef ENABLE_GIMPLE_CHECKING
386 if (gimplify_ctxp->conditions == 0)
387 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
388 #endif
389 ++(gimplify_ctxp->conditions);
392 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
393 now, add any conditional cleanups we've seen to the prequeue. */
395 static void
396 gimple_pop_condition (gimple_seq *pre_p)
398 int conds = --(gimplify_ctxp->conditions);
400 gcc_assert (conds >= 0);
401 if (conds == 0)
403 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
404 gimplify_ctxp->conditional_cleanups = NULL;
408 /* A stable comparison routine for use with splay trees and DECLs. */
410 static int
411 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
413 tree a = (tree) xa;
414 tree b = (tree) xb;
416 return DECL_UID (a) - DECL_UID (b);
419 /* Create a new omp construct that deals with variable remapping. */
421 static struct gimplify_omp_ctx *
422 new_omp_context (enum omp_region_type region_type)
424 struct gimplify_omp_ctx *c;
426 c = XCNEW (struct gimplify_omp_ctx);
427 c->outer_context = gimplify_omp_ctxp;
428 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
429 c->privatized_types = new hash_set<tree>;
430 c->location = input_location;
431 c->region_type = region_type;
432 if ((region_type & ORT_TASK) == 0)
433 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
434 else
435 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
436 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
437 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
438 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
439 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
441 return c;
444 /* Destroy an omp construct that deals with variable remapping. */
446 static void
447 delete_omp_context (struct gimplify_omp_ctx *c)
449 splay_tree_delete (c->variables);
450 delete c->privatized_types;
451 c->loop_iter_var.release ();
452 XDELETE (c);
455 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
456 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
458 /* Both gimplify the statement T and append it to *SEQ_P. This function
459 behaves exactly as gimplify_stmt, but you don't have to pass T as a
460 reference. */
462 void
463 gimplify_and_add (tree t, gimple_seq *seq_p)
465 gimplify_stmt (&t, seq_p);
468 /* Gimplify statement T into sequence *SEQ_P, and return the first
469 tuple in the sequence of generated tuples for this statement.
470 Return NULL if gimplifying T produced no tuples. */
472 static gimple *
473 gimplify_and_return_first (tree t, gimple_seq *seq_p)
475 gimple_stmt_iterator last = gsi_last (*seq_p);
477 gimplify_and_add (t, seq_p);
479 if (!gsi_end_p (last))
481 gsi_next (&last);
482 return gsi_stmt (last);
484 else
485 return gimple_seq_first_stmt (*seq_p);
488 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
489 LHS, or for a call argument. */
491 static bool
492 is_gimple_mem_rhs (tree t)
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return is_gimple_val (t) || is_gimple_lvalue (t);
502 /* Return true if T is a CALL_EXPR or an expression that can be
503 assigned to a temporary. Note that this predicate should only be
504 used during gimplification. See the rationale for this in
505 gimplify_modify_expr. */
507 static bool
508 is_gimple_reg_rhs_or_call (tree t)
510 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
511 || TREE_CODE (t) == CALL_EXPR);
514 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
515 this predicate should only be used during gimplification. See the
516 rationale for this in gimplify_modify_expr. */
518 static bool
519 is_gimple_mem_rhs_or_call (tree t)
521 /* If we're dealing with a renamable type, either source or dest must be
522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t)))
524 return is_gimple_val (t);
525 else
526 return (is_gimple_val (t)
527 || is_gimple_lvalue (t)
528 || TREE_CLOBBER_P (t)
529 || TREE_CODE (t) == CALL_EXPR);
532 /* Create a temporary with a name derived from VAL. Subroutine of
533 lookup_tmp_var; nobody else should call this function. */
535 static inline tree
536 create_tmp_from_val (tree val)
538 /* Drop all qualifiers and address-space information from the value type. */
539 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
540 tree var = create_tmp_var (type, get_name (val));
541 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
542 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
543 DECL_GIMPLE_REG_P (var) = 1;
544 return var;
547 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
548 an existing expression temporary. */
550 static tree
551 lookup_tmp_var (tree val, bool is_formal)
553 tree ret;
555 /* If not optimizing, never really reuse a temporary. local-alloc
556 won't allocate any variable that is used in more than one basic
557 block, which means it will go into memory, causing much extra
558 work in reload and final and poorer code generation, outweighing
559 the extra memory allocation here. */
560 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
561 ret = create_tmp_from_val (val);
562 else
564 elt_t elt, *elt_p;
565 elt_t **slot;
567 elt.val = val;
568 if (!gimplify_ctxp->temp_htab)
569 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
570 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
571 if (*slot == NULL)
573 elt_p = XNEW (elt_t);
574 elt_p->val = val;
575 elt_p->temp = ret = create_tmp_from_val (val);
576 *slot = elt_p;
578 else
580 elt_p = *slot;
581 ret = elt_p->temp;
585 return ret;
588 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
590 static tree
591 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
592 bool is_formal, bool allow_ssa)
594 tree t, mod;
596 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
597 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
598 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
599 fb_rvalue);
601 if (allow_ssa
602 && gimplify_ctxp->into_ssa
603 && is_gimple_reg_type (TREE_TYPE (val)))
605 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
606 if (! gimple_in_ssa_p (cfun))
608 const char *name = get_name (val);
609 if (name)
610 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
613 else
614 t = lookup_tmp_var (val, is_formal);
616 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
618 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622 ggc_free (mod);
624 return t;
627 /* Return a formal temporary variable initialized with VAL. PRE_P is as
628 in gimplify_expr. Only use this function if:
630 1) The value of the unfactored expression represented by VAL will not
631 change between the initialization and use of the temporary, and
632 2) The temporary will not be otherwise modified.
634 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
635 and #2 means it is inappropriate for && temps.
637 For other cases, use get_initialized_tmp_var instead. */
639 tree
640 get_formal_tmp_var (tree val, gimple_seq *pre_p)
642 return internal_get_tmp_var (val, pre_p, NULL, true, true);
645 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
646 are as in gimplify_expr. */
648 tree
649 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
650 bool allow_ssa)
652 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
655 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
656 generate debug info for them; otherwise don't. */
658 void
659 declare_vars (tree vars, gimple *gs, bool debug_info)
661 tree last = vars;
662 if (last)
664 tree temps, block;
666 gbind *scope = as_a <gbind *> (gs);
668 temps = nreverse (last);
670 block = gimple_bind_block (scope);
671 gcc_assert (!block || TREE_CODE (block) == BLOCK);
672 if (!block || !debug_info)
674 DECL_CHAIN (last) = gimple_bind_vars (scope);
675 gimple_bind_set_vars (scope, temps);
677 else
679 /* We need to attach the nodes both to the BIND_EXPR and to its
680 associated BLOCK for debugging purposes. The key point here
681 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
682 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
683 if (BLOCK_VARS (block))
684 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
685 else
687 gimple_bind_set_vars (scope,
688 chainon (gimple_bind_vars (scope), temps));
689 BLOCK_VARS (block) = temps;
695 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
696 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
697 no such upper bound can be obtained. */
699 static void
700 force_constant_size (tree var)
702 /* The only attempt we make is by querying the maximum size of objects
703 of the variable's type. */
705 HOST_WIDE_INT max_size;
707 gcc_assert (VAR_P (var));
709 max_size = max_int_size_in_bytes (TREE_TYPE (var));
711 gcc_assert (max_size >= 0);
713 DECL_SIZE_UNIT (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
715 DECL_SIZE (var)
716 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
719 /* Push the temporary variable TMP into the current binding. */
721 void
722 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
724 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
726 /* Later processing assumes that the object size is constant, which might
727 not be true at this point. Force the use of a constant upper bound in
728 this case. */
729 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
730 force_constant_size (tmp);
732 DECL_CONTEXT (tmp) = fn->decl;
733 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
735 record_vars_into (tmp, fn->decl);
738 /* Push the temporary variable TMP into the current binding. */
740 void
741 gimple_add_tmp_var (tree tmp)
743 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
749 force_constant_size (tmp);
751 DECL_CONTEXT (tmp) = current_function_decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
754 if (gimplify_ctxp)
756 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
757 gimplify_ctxp->temps = tmp;
759 /* Mark temporaries local within the nearest enclosing parallel. */
760 if (gimplify_omp_ctxp)
762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
763 while (ctx
764 && (ctx->region_type == ORT_WORKSHARE
765 || ctx->region_type == ORT_TASKGROUP
766 || ctx->region_type == ORT_SIMD
767 || ctx->region_type == ORT_ACC))
768 ctx = ctx->outer_context;
769 if (ctx)
770 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
773 else if (cfun)
774 record_vars (tmp);
775 else
777 gimple_seq body_seq;
779 /* This case is for nested functions. We need to expose the locals
780 they create. */
781 body_seq = gimple_body (current_function_decl);
782 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it
   is probably easier for a front-end to unshare the expressions manually.
   On the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
824 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
825 These nodes model computations that must be done once. If we were to
826 unshare something like SAVE_EXPR(i++), the gimplification process would
827 create wrong code. However, if DATA is non-null, it must hold a pointer
828 set that is used to unshare the subtrees of these nodes. */
830 static tree
831 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
833 tree t = *tp;
834 enum tree_code code = TREE_CODE (t);
836 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
837 copy their subtrees if we can make sure to do it only once. */
838 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
840 if (data && !((hash_set<tree> *)data)->add (t))
842 else
843 *walk_subtrees = 0;
846 /* Stop at types, decls, constants like copy_tree_r. */
847 else if (TREE_CODE_CLASS (code) == tcc_type
848 || TREE_CODE_CLASS (code) == tcc_declaration
849 || TREE_CODE_CLASS (code) == tcc_constant)
850 *walk_subtrees = 0;
852 /* Cope with the statement expression extension. */
853 else if (code == STATEMENT_LIST)
856 /* Leave the bulk of the work to copy_tree_r itself. */
857 else
858 copy_tree_r (tp, walk_subtrees, NULL);
860 return NULL_TREE;
863 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
864 If *TP has been visited already, then *TP is deeply copied by calling
865 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
867 static tree
868 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
870 tree t = *tp;
871 enum tree_code code = TREE_CODE (t);
873 /* Skip types, decls, and constants. But we do want to look at their
874 types and the bounds of types. Mark them as visited so we properly
875 unmark their subtrees on the unmark pass. If we've already seen them,
876 don't look down further. */
877 if (TREE_CODE_CLASS (code) == tcc_type
878 || TREE_CODE_CLASS (code) == tcc_declaration
879 || TREE_CODE_CLASS (code) == tcc_constant)
881 if (TREE_VISITED (t))
882 *walk_subtrees = 0;
883 else
884 TREE_VISITED (t) = 1;
887 /* If this node has been visited already, unshare it and don't look
888 any deeper. */
889 else if (TREE_VISITED (t))
891 walk_tree (tp, mostly_copy_tree_r, data, NULL);
892 *walk_subtrees = 0;
895 /* Otherwise, mark the node as visited and keep looking. */
896 else
897 TREE_VISITED (t) = 1;
899 return NULL_TREE;
902 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
903 copy_if_shared_r callback unmodified. */
905 static inline void
906 copy_if_shared (tree *tp, void *data)
908 walk_tree (tp, copy_if_shared_r, data, NULL);
911 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
912 any nested functions. */
914 static void
915 unshare_body (tree fndecl)
917 struct cgraph_node *cgn = cgraph_node::get (fndecl);
918 /* If the language requires deep unsharing, we need a pointer set to make
919 sure we don't repeatedly unshare subtrees of unshareable nodes. */
920 hash_set<tree> *visited
921 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
923 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
924 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
925 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
927 delete visited;
929 if (cgn)
930 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
931 unshare_body (cgn->decl);
934 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
935 Subtrees are walked until the first unvisited node is encountered. */
937 static tree
938 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
940 tree t = *tp;
942 /* If this node has been visited, unmark it and keep looking. */
943 if (TREE_VISITED (t))
944 TREE_VISITED (t) = 0;
946 /* Otherwise, don't look any deeper. */
947 else
948 *walk_subtrees = 0;
950 return NULL_TREE;
953 /* Unmark the visited trees rooted at *TP. */
955 static inline void
956 unmark_visited (tree *tp)
958 walk_tree (tp, unmark_visited_r, NULL, NULL);
961 /* Likewise, but mark all trees as not visited. */
963 static void
964 unvisit_body (tree fndecl)
966 struct cgraph_node *cgn = cgraph_node::get (fndecl);
968 unmark_visited (&DECL_SAVED_TREE (fndecl));
969 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
970 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
972 if (cgn)
973 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
974 unvisit_body (cgn->decl);
977 /* Unconditionally make an unshared copy of EXPR. This is used when using
978 stored expressions which span multiple functions, such as BINFO_VTABLE,
979 as the normal unsharing process can't tell that they're shared. */
981 tree
982 unshare_expr (tree expr)
984 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
985 return expr;
988 /* Worker for unshare_expr_without_location. */
990 static tree
991 prune_expr_location (tree *tp, int *walk_subtrees, void *)
993 if (EXPR_P (*tp))
994 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
995 else
996 *walk_subtrees = 0;
997 return NULL_TREE;
1000 /* Similar to unshare_expr but also prune all expression locations
1001 from EXPR. */
1003 tree
1004 unshare_expr_without_location (tree expr)
1006 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1007 if (EXPR_P (expr))
1008 walk_tree (&expr, prune_expr_location, NULL, NULL);
1009 return expr;
1012 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1013 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1014 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1015 EXPR is the location of the EXPR. */
1017 static location_t
1018 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1020 if (!expr)
1021 return or_else;
1023 if (EXPR_HAS_LOCATION (expr))
1024 return EXPR_LOCATION (expr);
1026 if (TREE_CODE (expr) != STATEMENT_LIST)
1027 return or_else;
1029 tree_stmt_iterator i = tsi_start (expr);
1031 bool found = false;
1032 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1034 found = true;
1035 tsi_next (&i);
1038 if (!found || !tsi_one_before_end_p (i))
1039 return or_else;
1041 return rexpr_location (tsi_stmt (i), or_else);
1044 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1045 rexpr_location for the potential recursion. */
1047 static inline bool
1048 rexpr_has_location (tree expr)
1050 return rexpr_location (expr) != UNKNOWN_LOCATION;
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-NULL, is an INIT_EXPR/MODIFY_EXPR whose RHS is WRAPPER;
   it is pushed down so the innermost value-producing statement becomes
   its new RHS.  If TEMP is NULL, a fresh "retval" temporary is created
   instead.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper visited on the
	 way down is voidified, since its value now flows through TEMP
	 instead.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a STATEMENT_LIST is its last statement;
		   an empty list terminates the descent (p = NULL).  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      /* Anything deeper that is not a known wrapper is the actual
		 value-producing expression; stop here.  */
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1151 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1152 a temporary through which they communicate. */
1154 static void
1155 build_stack_save_restore (gcall **save, gcall **restore)
1157 tree tmp_var;
1159 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1160 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1161 gimple_call_set_lhs (*save, tmp_var);
1163 *restore
1164 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1165 1, tmp_var);
1168 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1170 static tree
1171 build_asan_poison_call_expr (tree decl)
1173 /* Do not poison variables that have size equal to zero. */
1174 tree unit_size = DECL_SIZE_UNIT (decl);
1175 if (zerop (unit_size))
1176 return NULL_TREE;
1178 tree base = build_fold_addr_expr (decl);
1180 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1181 void_type_node, 3,
1182 build_int_cst (integer_type_node,
1183 ASAN_MARK_POISON),
1184 base, unit_size);
1187 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1188 on POISON flag, shadow memory of a DECL variable. The call will be
1189 put on location identified by IT iterator, where BEFORE flag drives
1190 position where the stmt will be put. */
1192 static void
1193 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1194 bool before)
1196 tree unit_size = DECL_SIZE_UNIT (decl);
1197 tree base = build_fold_addr_expr (decl);
1199 /* Do not poison variables that have size equal to zero. */
1200 if (zerop (unit_size))
1201 return;
1203 /* It's necessary to have all stack variables aligned to ASAN granularity
1204 bytes. */
1205 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1206 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1208 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1210 gimple *g
1211 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1212 build_int_cst (integer_type_node, flags),
1213 base, unit_size);
1215 if (before)
1216 gsi_insert_before (it, g, GSI_NEW_STMT);
1217 else
1218 gsi_insert_after (it, g, GSI_NEW_STMT);
1221 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1222 either poisons or unpoisons a DECL. Created statement is appended
1223 to SEQ_P gimple sequence. */
1225 static void
1226 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1228 gimple_stmt_iterator it = gsi_last (*seq_p);
1229 bool before = false;
1231 if (gsi_end_p (it))
1232 before = true;
1234 asan_poison_variable (decl, poison, &it, before);
1237 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1239 static int
1240 sort_by_decl_uid (const void *a, const void *b)
1242 const tree *t1 = (const tree *)a;
1243 const tree *t2 = (const tree *)b;
1245 int uid1 = DECL_UID (*t1);
1246 int uid2 = DECL_UID (*t2);
1248 if (uid1 < uid2)
1249 return -1;
1250 else if (uid1 > uid2)
1251 return 1;
1252 else
1253 return 0;
1256 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1257 depending on POISON flag. Created statement is appended
1258 to SEQ_P gimple sequence. */
1260 static void
1261 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1263 unsigned c = variables->elements ();
1264 if (c == 0)
1265 return;
1267 auto_vec<tree> sorted_variables (c);
1269 for (hash_set<tree>::iterator it = variables->begin ();
1270 it != variables->end (); ++it)
1271 sorted_variables.safe_push (*it);
1273 sorted_variables.qsort (sort_by_decl_uid);
1275 unsigned i;
1276 tree var;
1277 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1279 asan_poison_variable (var, poison, seq_p);
1281 /* Add use_after_scope_memory attribute for the variable in order
1282 to prevent re-written into SSA. */
1283 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1284 DECL_ATTRIBUTES (var)))
1285 DECL_ATTRIBUTES (var)
1286 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1287 integer_one_node,
1288 DECL_ATTRIBUTES (var));
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   *EXPR_P is the BIND_EXPR; the resulting GIMPLE_BIND is appended to
   PRE_P.  Returns GS_OK (with *EXPR_P set to the value temporary) when
   the bind produced a value, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  /* Save the context flags; they are reset below so we can tell whether
     THIS bind's body saved the stack or used alloca.  */
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset so that after gimplifying the body these reflect only what
     happened inside this bind.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  /* Collect OpenACC "declare" return clauses for variables
	     leaving scope; they are emitted as one OACC_DECLARE below.  */
	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      /* Re-poison variables tracked for -fsanitize-address-use-after-scope
	 when they go out of scope.  */
      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      /* Wrap the body in a TRY_FINALLY so the cleanup (stack restore,
	 clobbers, poisoning) runs on every exit path.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* Only a return inside a conditional context is an "early return"
     worth predicting; it is marked as unlikely (NOT_TAKEN).  A return
     in straight-line code executes unconditionally, so no PREDICT
     statement is added.  (Note the condition really is positive here:
     we add the hint only WHEN inside a conditional context.)  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Plain "return;" or an already-reduced "return <RESULT_DECL>;"
     needs no rewriting.  */
  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      /* NOTE(review): this declaration shadows the outer `ret'; harmless
	 (the outer one is only used on the other path) but -Wshadow
	 would flag it.  */
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      /* ret_expr is a MODIFY_EXPR/INIT_EXPR assigning into the
	 RESULT_DECL; dig the RESULT_DECL out of its LHS.  */
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    /* Reuse the single per-function return temporary.  */
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
/* Gimplify a variable-length array DECL: gimplify its size expressions,
   give it a DECL_VALUE_EXPR that indirects through a pointer temporary,
   and emit an alloca call into SEQ_P to provide the storage.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The indirection cannot trap: ADDR always points at freshly
     alloca'd storage.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
1640 /* A helper function to be called via walk_tree. Mark all labels under *TP
1641 as being forced. To be called for DECL_INITIAL of static variables. */
1643 static tree
1644 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1646 if (TYPE_P (*tp))
1647 *walk_subtrees = 0;
1648 if (TREE_CODE (*tp) == LABEL_DECL)
1650 FORCED_LABEL (*tp) = 1;
1651 cfun->has_forced_label_in_static = 1;
1654 return NULL_TREE;
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  Emits into SEQ_P; *STMT_P is cleared.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Treat as a VLA any decl with non-constant size, and also any
	 suitably large decl under -fstack-check=generic (its storage is
	 then alloca'd so the stack check covers it).  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* Unpoison the variable on entry to its scope and remember it so
	 the enclosing bind re-poisons it on exit
	 (-fsanitize-address-use-after-scope).  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR and
		 gimplify it; the DECL_INITIAL slot is cleared since the
		 initialization now happens as a statement.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1748 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1749 and replacing the LOOP_EXPR with goto, but if the loop contains an
1750 EXIT_EXPR, we need to append a label for it to jump to. */
1752 static enum gimplify_status
1753 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1755 tree saved_label = gimplify_ctxp->exit_label;
1756 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1758 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1760 gimplify_ctxp->exit_label = NULL_TREE;
1762 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1764 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1766 if (gimplify_ctxp->exit_label)
1767 gimplify_seq_add_stmt (pre_p,
1768 gimple_build_label (gimplify_ctxp->exit_label));
1770 gimplify_ctxp->exit_label = saved_label;
1772 *expr_p = NULL;
1773 return GS_ALL_DONE;
1776 /* Gimplify a statement list onto a sequence. These may be created either
1777 by an enlightened front-end, or by shortcut_cond_expr. */
1779 static enum gimplify_status
1780 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1782 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1784 tree_stmt_iterator i = tsi_start (*expr_p);
1786 while (!tsi_end_p (i))
1788 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1789 tsi_delink (&i);
1792 if (temp)
1794 *expr_p = temp;
1795 return GS_OK;
1798 return GS_ALL_DONE;
/* Callback for walk_gimple_seq.  Locate the first "real" statement of
   the walked sequence, store it in WI->info and stop the walk by
   returning non-NULL.  Used by maybe_warn_switch_unreachable.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_CALL:
      /* ASAN_MARK calls are compiler-generated instrumentation, not
	 user code; look inside rather than reporting them.  */
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
1851 /* Possibly warn about unreachable statements between switch's controlling
1852 expression and the first case. SEQ is the body of a switch expression. */
1854 static void
1855 maybe_warn_switch_unreachable (gimple_seq seq)
1857 if (!warn_switch_unreachable
1858 /* This warning doesn't play well with Fortran when optimizations
1859 are on. */
1860 || lang_GNU_Fortran ()
1861 || seq == NULL)
1862 return;
1864 struct walk_stmt_info wi;
1865 memset (&wi, 0, sizeof (wi));
1866 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1867 gimple *stmt = (gimple *) wi.info;
1869 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1871 if (gimple_code (stmt) == GIMPLE_GOTO
1872 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1873 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1874 /* Don't warn for compiler-generated gotos. These occur
1875 in Duff's devices, for example. */;
1876 else
1877 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1878 "statement will never be executed");
/* A label entry that pairs label and a location.  */
struct label_entry
{
  /* The LABEL_DECL itself.  */
  tree label;
  /* Source location to report for a fall-through into this label.  */
  location_t loc;
};
1890 /* Find LABEL in vector of label entries VEC. */
1892 static struct label_entry *
1893 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1895 unsigned int i;
1896 struct label_entry *l;
1898 FOR_EACH_VEC_ELT (*vec, i, l)
1899 if (l->label == label)
1900 return l;
1901 return NULL;
1904 /* Return true if LABEL, a LABEL_DECL, represents a case label
1905 in a vector of labels CASES. */
1907 static bool
1908 case_label_p (const vec<tree> *cases, tree label)
1910 unsigned int i;
1911 tree l;
1913 FOR_EACH_VEC_ELT (*cases, i, l)
1914 if (CASE_LABEL (l) == label)
1915 return true;
1916 return false;
/* Find the last nondebug statement in a scope STMT.  Recurses through
   GIMPLE_BIND and GIMPLE_TRY wrappers; may return NULL for an empty
   scope.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* For a try/finally whose body can fall through (and isn't a
	   FALLTHROUGH marker), the statement that executes last is the
	   last statement of the cleanup sequence.
	   NOTE(review): gimple_stmt_may_fallthru is evaluated before the
	   last_eval == NULL check — presumably it tolerates or never
	   receives NULL here; verify against gimple_stmt_may_fallthru.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      /* Callers use the *_nondebug_stmt accessors, so debug statements
	 should never reach here.  */
      gcc_unreachable ();

    default:
      return stmt;
    }
}
/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  Advances *GSI_P past the
   scanned statements; on return it points at the stopping label (or the
   end of the sequence).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The false label is a potential fall-through target: record
	     it with the location of the controlling if.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  return prev;
}
/* Return true if the switch fallthough warning should occur.  LABEL is
   the label statement that we're falling through to.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      /* Scan forward over consecutive non-case labels; only warn if a
	 case label (or the end of statements) follows directly.  */
      tree l;
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
/* Callback for walk_gimple_seq.  Detect fall-through between a statement
   run after one label and the next located label, and emit
   -Wimplicit-fallthrough diagnostics.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    auto_diagnostic_group d;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      /* The preceding "statement" was itself a recorded label:
		 warn at the location stored for it.  */
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && gimple_has_location (prev))
	      warned_p = warning_at (gimple_location (prev),
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
2236 /* Warn when a switch case falls through. */
2238 static void
2239 maybe_warn_implicit_fallthrough (gimple_seq seq)
2241 if (!warn_implicit_fallthrough)
2242 return;
2244 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2245 if (!(lang_GNU_C ()
2246 || lang_GNU_CXX ()
2247 || lang_GNU_OBJC ()))
2248 return;
2250 struct walk_stmt_info wi;
2251 memset (&wi, 0, sizeof (wi));
2252 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
/* Callback for walk_gimple_seq.  Removes IFN_FALLTHROUGH internal calls
   and diagnoses ones that do not immediately precede a case label or
   default label.  Returns integer_zero_node to stop the walk when the
   removed call was the last statement, NULL_TREE otherwise.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* The marker itself never survives gimplification; remove it
	     and then verify what follows it.  */
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    return integer_zero_node;

	  bool found = false;
	  /* Remember the marker's location before STMT is reused below.  */
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  /* Case/default labels have a location and an artificial
		     LABEL_DECL; user labels do not qualify.  */
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		;  /* ASAN markers are transparent; keep scanning.  */
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
			"a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2337 /* Expand all FALLTHROUGH () calls in SEQ. */
2339 static void
2340 expand_FALLTHROUGH (gimple_seq *seq_p)
2342 struct walk_stmt_info wi;
2343 memset (&wi, 0, sizeof (wi));
2344 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  *EXPR_P is replaced; the resulting GIMPLE_SWITCH (and
   its gimplified body) is appended to PRE_P.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  /* A SWITCH_EXPR may have no type; fall back to the controlling
     expression's type for case-label preprocessing.  */
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* All switch-local variables must have been clobbered/popped
	     by the time the body is fully gimplified.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  /* No user-written default: synthesize one so the GIMPLE_SWITCH
	     always has a default edge.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
2468 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2470 static enum gimplify_status
2471 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2473 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2474 == current_function_decl);
2476 tree label = LABEL_EXPR_LABEL (*expr_p);
2477 glabel *label_stmt = gimple_build_label (label);
2478 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2479 gimplify_seq_add_stmt (pre_p, label_stmt);
2481 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2482 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2483 NOT_TAKEN));
2484 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2485 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2486 TAKEN));
2488 return GS_ALL_DONE;
2491 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2493 static enum gimplify_status
2494 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2496 struct gimplify_ctx *ctxp;
2497 glabel *label_stmt;
2499 /* Invalid programs can play Duff's Device type games with, for example,
2500 #pragma omp parallel. At least in the C front end, we don't
2501 detect such invalid branches until after gimplification, in the
2502 diagnose_omp_blocks pass. */
2503 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2504 if (ctxp->case_labels.exists ())
2505 break;
2507 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2508 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2509 ctxp->case_labels.safe_push (*expr_p);
2510 gimplify_seq_add_stmt (pre_p, label_stmt);
2512 return GS_ALL_DONE;
2515 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2516 if necessary. */
2518 tree
2519 build_and_jump (tree *label_p)
2521 if (label_p == NULL)
2522 /* If there's nowhere to jump, just fall through. */
2523 return NULL_TREE;
2525 if (*label_p == NULL_TREE)
2527 tree label = create_artificial_label (UNKNOWN_LOCATION);
2528 *label_p = label;
2531 return build1 (GOTO_EXPR, void_type_node, *label_p);
2534 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2535 This also involves building a label to jump to and communicating it to
2536 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2538 static enum gimplify_status
2539 gimplify_exit_expr (tree *expr_p)
2541 tree cond = TREE_OPERAND (*expr_p, 0);
2542 tree expr;
2544 expr = build_and_jump (&gimplify_ctxp->exit_label);
2545 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2546 *expr_p = expr;
2548 return GS_OK;
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references, get_unwidened gives the narrower bit-field
     mode's type when one applies; otherwise use the field's declared
     type (operand 1 is the FIELD_DECL).  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
		   | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK; *EXPR_P is rewritten
   in place and will be re-examined by the caller.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  Note this
     must run before any DECL_VALUE_EXPR substitution below.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      /* Unshare so later gimplification doesn't corrupt the shared
	 DECL_VALUE_EXPR tree.  */
      *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T, based on
   T's code class and on the flags of its operands.  Codes with inherent
   side effects (assignments, increments, va_arg) are left untouched.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Start from the volatility of T itself, then OR in any operand
	 that has side effects.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the old value in a temporary; that is the expression's
	 result.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* The store happens up front; the saved old value is the result.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3094 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3096 static void
3097 maybe_with_size_expr (tree *expr_p)
3099 tree expr = *expr_p;
3100 tree type = TREE_TYPE (expr);
3101 tree size;
3103 /* If we've already wrapped this or the type is error_mark_node, we can't do
3104 anything. */
3105 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3106 || type == error_mark_node)
3107 return;
3109 /* If the size isn't known or is a constant, we have nothing to do. */
3110 size = TYPE_SIZE_UNIT (type);
3111 if (!size || poly_int_tree_p (size))
3112 return;
3114 /* Otherwise, make a WITH_SIZE_EXPR. */
3115 size = unshare_expr (size);
3116 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3117 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
	      bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  /* A void initializer means the temporary is initialized by
	     side effect; only strip when the initializer is a value.  */
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
3167 /* Don't fold inside offloading or taskreg regions: it can break code by
3168 adding decl references that weren't in the source. We'll do it during
3169 omplower pass instead. */
3171 static bool
3172 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3174 struct gimplify_omp_ctx *ctx;
3175 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3176 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3177 return false;
3178 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3179 return false;
3180 return fold_stmt (gsi);
3183 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3184 WANT_VALUE is true if the result of the call is desired. */
3186 static enum gimplify_status
3187 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3189 tree fndecl, parms, p, fnptrtype;
3190 enum gimplify_status ret;
3191 int i, nargs;
3192 gcall *call;
3193 bool builtin_va_start_p = false;
3194 location_t loc = EXPR_LOCATION (*expr_p);
3196 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3198 /* For reliable diagnostics during inlining, it is necessary that
3199 every call_expr be annotated with file and line. */
3200 if (! EXPR_HAS_LOCATION (*expr_p))
3201 SET_EXPR_LOCATION (*expr_p, input_location);
3203 /* Gimplify internal functions created in the FEs. */
3204 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3206 if (want_value)
3207 return GS_ALL_DONE;
3209 nargs = call_expr_nargs (*expr_p);
3210 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3211 auto_vec<tree> vargs (nargs);
3213 for (i = 0; i < nargs; i++)
3215 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3216 EXPR_LOCATION (*expr_p));
3217 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3220 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3221 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3222 gimplify_seq_add_stmt (pre_p, call);
3223 return GS_ALL_DONE;
3226 /* This may be a call to a builtin function.
3228 Builtin function calls may be transformed into different
3229 (and more efficient) builtin function calls under certain
3230 circumstances. Unfortunately, gimplification can muck things
3231 up enough that the builtin expanders are not aware that certain
3232 transformations are still valid.
3234 So we attempt transformation/gimplification of the call before
3235 we gimplify the CALL_EXPR. At this time we do not manage to
3236 transform all calls in the same manner as the expanders do, but
3237 we do transform most of them. */
3238 fndecl = get_callee_fndecl (*expr_p);
3239 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3240 switch (DECL_FUNCTION_CODE (fndecl))
3242 CASE_BUILT_IN_ALLOCA:
3243 /* If the call has been built for a variable-sized object, then we
3244 want to restore the stack level when the enclosing BIND_EXPR is
3245 exited to reclaim the allocated space; otherwise, we precisely
3246 need to do the opposite and preserve the latest stack level. */
3247 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3248 gimplify_ctxp->save_stack = true;
3249 else
3250 gimplify_ctxp->keep_stack = true;
3251 break;
3253 case BUILT_IN_VA_START:
3255 builtin_va_start_p = TRUE;
3256 if (call_expr_nargs (*expr_p) < 2)
3258 error ("too few arguments to function %<va_start%>");
3259 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3260 return GS_OK;
3263 if (fold_builtin_next_arg (*expr_p, true))
3265 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3266 return GS_OK;
3268 break;
3271 default:
3274 if (fndecl && fndecl_built_in_p (fndecl))
3276 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3277 if (new_tree && new_tree != *expr_p)
3279 /* There was a transformation of this call which computes the
3280 same value, but in a more efficient way. Return and try
3281 again. */
3282 *expr_p = new_tree;
3283 return GS_OK;
3287 /* Remember the original function pointer type. */
3288 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3290 /* There is a sequence point before the call, so any side effects in
3291 the calling expression must occur before the actual call. Force
3292 gimplify_expr to use an internal post queue. */
3293 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3294 is_gimple_call_addr, fb_rvalue);
3296 nargs = call_expr_nargs (*expr_p);
3298 /* Get argument types for verification. */
3299 fndecl = get_callee_fndecl (*expr_p);
3300 parms = NULL_TREE;
3301 if (fndecl)
3302 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3303 else
3304 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3306 if (fndecl && DECL_ARGUMENTS (fndecl))
3307 p = DECL_ARGUMENTS (fndecl);
3308 else if (parms)
3309 p = parms;
3310 else
3311 p = NULL_TREE;
3312 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3315 /* If the last argument is __builtin_va_arg_pack () and it is not
3316 passed as a named argument, decrease the number of CALL_EXPR
3317 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3318 if (!p
3319 && i < nargs
3320 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3322 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3323 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3325 if (last_arg_fndecl
3326 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3328 tree call = *expr_p;
3330 --nargs;
3331 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3332 CALL_EXPR_FN (call),
3333 nargs, CALL_EXPR_ARGP (call));
3335 /* Copy all CALL_EXPR flags, location and block, except
3336 CALL_EXPR_VA_ARG_PACK flag. */
3337 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3338 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3339 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3340 = CALL_EXPR_RETURN_SLOT_OPT (call);
3341 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3342 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3344 /* Set CALL_EXPR_VA_ARG_PACK. */
3345 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3349 /* If the call returns twice then after building the CFG the call
3350 argument computations will no longer dominate the call because
3351 we add an abnormal incoming edge to the call. So do not use SSA
3352 vars there. */
3353 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3355 /* Gimplify the function arguments. */
3356 if (nargs > 0)
3358 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3359 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3360 PUSH_ARGS_REVERSED ? i-- : i++)
3362 enum gimplify_status t;
3364 /* Avoid gimplifying the second argument to va_start, which needs to
3365 be the plain PARM_DECL. */
3366 if ((i != 1) || !builtin_va_start_p)
3368 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3369 EXPR_LOCATION (*expr_p), ! returns_twice);
3371 if (t == GS_ERROR)
3372 ret = GS_ERROR;
3377 /* Gimplify the static chain. */
3378 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3380 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3381 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3382 else
3384 enum gimplify_status t;
3385 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3386 EXPR_LOCATION (*expr_p), ! returns_twice);
3387 if (t == GS_ERROR)
3388 ret = GS_ERROR;
3392 /* Verify the function result. */
3393 if (want_value && fndecl
3394 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3396 error_at (loc, "using result of function returning %<void%>");
3397 ret = GS_ERROR;
3400 /* Try this again in case gimplification exposed something. */
3401 if (ret != GS_ERROR)
3403 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3405 if (new_tree && new_tree != *expr_p)
3407 /* There was a transformation of this call which computes the
3408 same value, but in a more efficient way. Return and try
3409 again. */
3410 *expr_p = new_tree;
3411 return GS_OK;
3414 else
3416 *expr_p = error_mark_node;
3417 return GS_ERROR;
3420 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3421 decl. This allows us to eliminate redundant or useless
3422 calls to "const" functions. */
3423 if (TREE_CODE (*expr_p) == CALL_EXPR)
3425 int flags = call_expr_flags (*expr_p);
3426 if (flags & (ECF_CONST | ECF_PURE)
3427 /* An infinite loop is considered a side effect. */
3428 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3429 TREE_SIDE_EFFECTS (*expr_p) = 0;
3432 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3433 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3434 form and delegate the creation of a GIMPLE_CALL to
3435 gimplify_modify_expr. This is always possible because when
3436 WANT_VALUE is true, the caller wants the result of this call into
3437 a temporary, which means that we will emit an INIT_EXPR in
3438 internal_get_tmp_var which will then be handled by
3439 gimplify_modify_expr. */
3440 if (!want_value)
3442 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3443 have to do is replicate it as a GIMPLE_CALL tuple. */
3444 gimple_stmt_iterator gsi;
3445 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3446 notice_special_calls (call);
3447 gimplify_seq_add_stmt (pre_p, call);
3448 gsi = gsi_last (*pre_p);
3449 maybe_fold_stmt (&gsi);
3450 *expr_p = NULL_TREE;
3452 else
3453 /* Remember the original function type. */
3454 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3455 CALL_EXPR_FN (*expr_p));
3457 return ret;
3460 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3461 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3463 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3464 condition is true or false, respectively. If null, we should generate
3465 our own to skip over the evaluation of this specific expression.
3467 LOCUS is the source location of the COND_EXPR.
3469 This function is the tree equivalent of do_jump.
3471 shortcut_cond_r should only be called by shortcut_cond_expr. */
3473 static tree
3474 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3475 location_t locus)
3477 tree local_label = NULL_TREE;
3478 tree t, expr = NULL;
3480 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3481 retain the shortcut semantics. Just insert the gotos here;
3482 shortcut_cond_expr will append the real blocks later. */
3483 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3485 location_t new_locus;
3487 /* Turn if (a && b) into
3489 if (a); else goto no;
3490 if (b) goto yes; else goto no;
3491 (no:) */
3493 if (false_label_p == NULL)
3494 false_label_p = &local_label;
3496 /* Keep the original source location on the first 'if'. */
3497 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3498 append_to_statement_list (t, &expr);
3500 /* Set the source location of the && on the second 'if'. */
3501 new_locus = rexpr_location (pred, locus);
3502 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3503 new_locus);
3504 append_to_statement_list (t, &expr);
3506 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3508 location_t new_locus;
3510 /* Turn if (a || b) into
3512 if (a) goto yes;
3513 if (b) goto yes; else goto no;
3514 (yes:) */
3516 if (true_label_p == NULL)
3517 true_label_p = &local_label;
3519 /* Keep the original source location on the first 'if'. */
3520 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3521 append_to_statement_list (t, &expr);
3523 /* Set the source location of the || on the second 'if'. */
3524 new_locus = rexpr_location (pred, locus);
3525 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3526 new_locus);
3527 append_to_statement_list (t, &expr);
3529 else if (TREE_CODE (pred) == COND_EXPR
3530 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3531 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3533 location_t new_locus;
3535 /* As long as we're messing with gotos, turn if (a ? b : c) into
3536 if (a)
3537 if (b) goto yes; else goto no;
3538 else
3539 if (c) goto yes; else goto no;
3541 Don't do this if one of the arms has void type, which can happen
3542 in C++ when the arm is throw. */
3544 /* Keep the original source location on the first 'if'. Set the source
3545 location of the ? on the second 'if'. */
3546 new_locus = rexpr_location (pred, locus);
3547 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3548 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3549 false_label_p, locus),
3550 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3551 false_label_p, new_locus));
3553 else
3555 expr = build3 (COND_EXPR, void_type_node, pred,
3556 build_and_jump (true_label_p),
3557 build_and_jump (false_label_p));
3558 SET_EXPR_LOCATION (expr, locus);
3561 if (local_label)
3563 t = build1 (LABEL_EXPR, void_type_node, local_label);
3564 append_to_statement_list (t, &expr);
3567 return expr;
3570 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3571 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3572 statement, if it is the last one. Otherwise, return NULL. */
3574 static tree
3575 find_goto (tree expr)
3577 if (!expr)
3578 return NULL_TREE;
3580 if (TREE_CODE (expr) == GOTO_EXPR)
3581 return expr;
3583 if (TREE_CODE (expr) != STATEMENT_LIST)
3584 return NULL_TREE;
3586 tree_stmt_iterator i = tsi_start (expr);
3588 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3589 tsi_next (&i);
3591 if (!tsi_one_before_end_p (i))
3592 return NULL_TREE;
3594 return find_goto (tsi_stmt (i));
3597 /* Same as find_goto, except that it returns NULL if the destination
3598 is not a LABEL_DECL. */
3600 static inline tree
3601 find_goto_label (tree expr)
3603 tree dest = find_goto (expr);
3604 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3605 return dest;
3606 return NULL_TREE;
3609 /* Given a conditional expression EXPR with short-circuit boolean
3610 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3611 predicate apart into the equivalent sequence of conditionals. */
3613 static tree
3614 shortcut_cond_expr (tree expr)
3616 tree pred = TREE_OPERAND (expr, 0);
3617 tree then_ = TREE_OPERAND (expr, 1);
3618 tree else_ = TREE_OPERAND (expr, 2);
3619 tree true_label, false_label, end_label, t;
3620 tree *true_label_p;
3621 tree *false_label_p;
3622 bool emit_end, emit_false, jump_over_else;
3623 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3624 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3626 /* First do simple transformations. */
3627 if (!else_se)
3629 /* If there is no 'else', turn
3630 if (a && b) then c
3631 into
3632 if (a) if (b) then c. */
3633 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3635 /* Keep the original source location on the first 'if'. */
3636 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3637 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3638 /* Set the source location of the && on the second 'if'. */
3639 if (rexpr_has_location (pred))
3640 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3641 then_ = shortcut_cond_expr (expr);
3642 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3643 pred = TREE_OPERAND (pred, 0);
3644 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3645 SET_EXPR_LOCATION (expr, locus);
3649 if (!then_se)
3651 /* If there is no 'then', turn
3652 if (a || b); else d
3653 into
3654 if (a); else if (b); else d. */
3655 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3657 /* Keep the original source location on the first 'if'. */
3658 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3659 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3660 /* Set the source location of the || on the second 'if'. */
3661 if (rexpr_has_location (pred))
3662 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3663 else_ = shortcut_cond_expr (expr);
3664 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3665 pred = TREE_OPERAND (pred, 0);
3666 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3667 SET_EXPR_LOCATION (expr, locus);
3671 /* If we're done, great. */
3672 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3673 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3674 return expr;
3676 /* Otherwise we need to mess with gotos. Change
3677 if (a) c; else d;
3679 if (a); else goto no;
3680 c; goto end;
3681 no: d; end:
3682 and recursively gimplify the condition. */
3684 true_label = false_label = end_label = NULL_TREE;
3686 /* If our arms just jump somewhere, hijack those labels so we don't
3687 generate jumps to jumps. */
3689 if (tree then_goto = find_goto_label (then_))
3691 true_label = GOTO_DESTINATION (then_goto);
3692 then_ = NULL;
3693 then_se = false;
3696 if (tree else_goto = find_goto_label (else_))
3698 false_label = GOTO_DESTINATION (else_goto);
3699 else_ = NULL;
3700 else_se = false;
3703 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3704 if (true_label)
3705 true_label_p = &true_label;
3706 else
3707 true_label_p = NULL;
3709 /* The 'else' branch also needs a label if it contains interesting code. */
3710 if (false_label || else_se)
3711 false_label_p = &false_label;
3712 else
3713 false_label_p = NULL;
3715 /* If there was nothing else in our arms, just forward the label(s). */
3716 if (!then_se && !else_se)
3717 return shortcut_cond_r (pred, true_label_p, false_label_p,
3718 EXPR_LOC_OR_LOC (expr, input_location));
3720 /* If our last subexpression already has a terminal label, reuse it. */
3721 if (else_se)
3722 t = expr_last (else_);
3723 else if (then_se)
3724 t = expr_last (then_);
3725 else
3726 t = NULL;
3727 if (t && TREE_CODE (t) == LABEL_EXPR)
3728 end_label = LABEL_EXPR_LABEL (t);
3730 /* If we don't care about jumping to the 'else' branch, jump to the end
3731 if the condition is false. */
3732 if (!false_label_p)
3733 false_label_p = &end_label;
3735 /* We only want to emit these labels if we aren't hijacking them. */
3736 emit_end = (end_label == NULL_TREE);
3737 emit_false = (false_label == NULL_TREE);
3739 /* We only emit the jump over the else clause if we have to--if the
3740 then clause may fall through. Otherwise we can wind up with a
3741 useless jump and a useless label at the end of gimplified code,
3742 which will cause us to think that this conditional as a whole
3743 falls through even if it doesn't. If we then inline a function
3744 which ends with such a condition, that can cause us to issue an
3745 inappropriate warning about control reaching the end of a
3746 non-void function. */
3747 jump_over_else = block_may_fallthru (then_);
3749 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3750 EXPR_LOC_OR_LOC (expr, input_location));
3752 expr = NULL;
3753 append_to_statement_list (pred, &expr);
3755 append_to_statement_list (then_, &expr);
3756 if (else_se)
3758 if (jump_over_else)
3760 tree last = expr_last (expr);
3761 t = build_and_jump (&end_label);
3762 if (rexpr_has_location (last))
3763 SET_EXPR_LOCATION (t, rexpr_location (last));
3764 append_to_statement_list (t, &expr);
3766 if (emit_false)
3768 t = build1 (LABEL_EXPR, void_type_node, false_label);
3769 append_to_statement_list (t, &expr);
3771 append_to_statement_list (else_, &expr);
3773 if (emit_end && end_label)
3775 t = build1 (LABEL_EXPR, void_type_node, end_label);
3776 append_to_statement_list (t, &expr);
3779 return expr;
3782 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3784 tree
3785 gimple_boolify (tree expr)
3787 tree type = TREE_TYPE (expr);
3788 location_t loc = EXPR_LOCATION (expr);
3790 if (TREE_CODE (expr) == NE_EXPR
3791 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3792 && integer_zerop (TREE_OPERAND (expr, 1)))
3794 tree call = TREE_OPERAND (expr, 0);
3795 tree fn = get_callee_fndecl (call);
3797 /* For __builtin_expect ((long) (x), y) recurse into x as well
3798 if x is truth_value_p. */
3799 if (fn
3800 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3801 && call_expr_nargs (call) == 2)
3803 tree arg = CALL_EXPR_ARG (call, 0);
3804 if (arg)
3806 if (TREE_CODE (arg) == NOP_EXPR
3807 && TREE_TYPE (arg) == TREE_TYPE (call))
3808 arg = TREE_OPERAND (arg, 0);
3809 if (truth_value_p (TREE_CODE (arg)))
3811 arg = gimple_boolify (arg);
3812 CALL_EXPR_ARG (call, 0)
3813 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3819 switch (TREE_CODE (expr))
3821 case TRUTH_AND_EXPR:
3822 case TRUTH_OR_EXPR:
3823 case TRUTH_XOR_EXPR:
3824 case TRUTH_ANDIF_EXPR:
3825 case TRUTH_ORIF_EXPR:
3826 /* Also boolify the arguments of truth exprs. */
3827 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3828 /* FALLTHRU */
3830 case TRUTH_NOT_EXPR:
3831 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3833 /* These expressions always produce boolean results. */
3834 if (TREE_CODE (type) != BOOLEAN_TYPE)
3835 TREE_TYPE (expr) = boolean_type_node;
3836 return expr;
3838 case ANNOTATE_EXPR:
3839 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3841 case annot_expr_ivdep_kind:
3842 case annot_expr_unroll_kind:
3843 case annot_expr_no_vector_kind:
3844 case annot_expr_vector_kind:
3845 case annot_expr_parallel_kind:
3846 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3847 if (TREE_CODE (type) != BOOLEAN_TYPE)
3848 TREE_TYPE (expr) = boolean_type_node;
3849 return expr;
3850 default:
3851 gcc_unreachable ();
3854 default:
3855 if (COMPARISON_CLASS_P (expr))
3857 /* There expressions always prduce boolean results. */
3858 if (TREE_CODE (type) != BOOLEAN_TYPE)
3859 TREE_TYPE (expr) = boolean_type_node;
3860 return expr;
3862 /* Other expressions that get here must have boolean values, but
3863 might need to be converted to the appropriate mode. */
3864 if (TREE_CODE (type) == BOOLEAN_TYPE)
3865 return expr;
3866 return fold_convert_loc (loc, boolean_type_node, expr);
3870 /* Given a conditional expression *EXPR_P without side effects, gimplify
3871 its operands. New statements are inserted to PRE_P. */
3873 static enum gimplify_status
3874 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3876 tree expr = *expr_p, cond;
3877 enum gimplify_status ret, tret;
3878 enum tree_code code;
3880 cond = gimple_boolify (COND_EXPR_COND (expr));
3882 /* We need to handle && and || specially, as their gimplification
3883 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3884 code = TREE_CODE (cond);
3885 if (code == TRUTH_ANDIF_EXPR)
3886 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3887 else if (code == TRUTH_ORIF_EXPR)
3888 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3889 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3890 COND_EXPR_COND (*expr_p) = cond;
3892 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3893 is_gimple_val, fb_rvalue);
3894 ret = MIN (ret, tret);
3895 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3896 is_gimple_val, fb_rvalue);
3898 return MIN (ret, tret);
3901 /* Return true if evaluating EXPR could trap.
3902 EXPR is GENERIC, while tree_could_trap_p can be called
3903 only on GIMPLE. */
3905 bool
3906 generic_expr_could_trap_p (tree expr)
3908 unsigned i, n;
3910 if (!expr || is_gimple_val (expr))
3911 return false;
3913 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3914 return true;
3916 n = TREE_OPERAND_LENGTH (expr);
3917 for (i = 0; i < n; i++)
3918 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3919 return true;
3921 return false;
3924 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3925 into
3927 if (p) if (p)
3928 t1 = a; a;
3929 else or else
3930 t1 = b; b;
3933 The second form is used when *EXPR_P is of type void.
3935 PRE_P points to the list where side effects that must happen before
3936 *EXPR_P should be stored. */
3938 static enum gimplify_status
3939 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3941 tree expr = *expr_p;
3942 tree type = TREE_TYPE (expr);
3943 location_t loc = EXPR_LOCATION (expr);
3944 tree tmp, arm1, arm2;
3945 enum gimplify_status ret;
3946 tree label_true, label_false, label_cont;
3947 bool have_then_clause_p, have_else_clause_p;
3948 gcond *cond_stmt;
3949 enum tree_code pred_code;
3950 gimple_seq seq = NULL;
3952 /* If this COND_EXPR has a value, copy the values into a temporary within
3953 the arms. */
3954 if (!VOID_TYPE_P (type))
3956 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3957 tree result;
3959 /* If either an rvalue is ok or we do not require an lvalue, create the
3960 temporary. But we cannot do that if the type is addressable. */
3961 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3962 && !TREE_ADDRESSABLE (type))
3964 if (gimplify_ctxp->allow_rhs_cond_expr
3965 /* If either branch has side effects or could trap, it can't be
3966 evaluated unconditionally. */
3967 && !TREE_SIDE_EFFECTS (then_)
3968 && !generic_expr_could_trap_p (then_)
3969 && !TREE_SIDE_EFFECTS (else_)
3970 && !generic_expr_could_trap_p (else_))
3971 return gimplify_pure_cond_expr (expr_p, pre_p);
3973 tmp = create_tmp_var (type, "iftmp");
3974 result = tmp;
3977 /* Otherwise, only create and copy references to the values. */
3978 else
3980 type = build_pointer_type (type);
3982 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3983 then_ = build_fold_addr_expr_loc (loc, then_);
3985 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3986 else_ = build_fold_addr_expr_loc (loc, else_);
3988 expr
3989 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3991 tmp = create_tmp_var (type, "iftmp");
3992 result = build_simple_mem_ref_loc (loc, tmp);
3995 /* Build the new then clause, `tmp = then_;'. But don't build the
3996 assignment if the value is void; in C++ it can be if it's a throw. */
3997 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3998 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
4000 /* Similarly, build the new else clause, `tmp = else_;'. */
4001 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4002 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
4004 TREE_TYPE (expr) = void_type_node;
4005 recalculate_side_effects (expr);
4007 /* Move the COND_EXPR to the prequeue. */
4008 gimplify_stmt (&expr, pre_p);
4010 *expr_p = result;
4011 return GS_ALL_DONE;
4014 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4015 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4016 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4017 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4019 /* Make sure the condition has BOOLEAN_TYPE. */
4020 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4022 /* Break apart && and || conditions. */
4023 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4024 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4026 expr = shortcut_cond_expr (expr);
4028 if (expr != *expr_p)
4030 *expr_p = expr;
4032 /* We can't rely on gimplify_expr to re-gimplify the expanded
4033 form properly, as cleanups might cause the target labels to be
4034 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4035 set up a conditional context. */
4036 gimple_push_condition ();
4037 gimplify_stmt (expr_p, &seq);
4038 gimple_pop_condition (pre_p);
4039 gimple_seq_add_seq (pre_p, seq);
4041 return GS_ALL_DONE;
4045 /* Now do the normal gimplification. */
4047 /* Gimplify condition. */
4048 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
4049 fb_rvalue);
4050 if (ret == GS_ERROR)
4051 return GS_ERROR;
4052 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4054 gimple_push_condition ();
4056 have_then_clause_p = have_else_clause_p = false;
4057 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4058 if (label_true
4059 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4060 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4061 have different locations, otherwise we end up with incorrect
4062 location information on the branches. */
4063 && (optimize
4064 || !EXPR_HAS_LOCATION (expr)
4065 || !rexpr_has_location (label_true)
4066 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4068 have_then_clause_p = true;
4069 label_true = GOTO_DESTINATION (label_true);
4071 else
4072 label_true = create_artificial_label (UNKNOWN_LOCATION);
4073 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4074 if (label_false
4075 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4076 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4077 have different locations, otherwise we end up with incorrect
4078 location information on the branches. */
4079 && (optimize
4080 || !EXPR_HAS_LOCATION (expr)
4081 || !rexpr_has_location (label_false)
4082 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4084 have_else_clause_p = true;
4085 label_false = GOTO_DESTINATION (label_false);
4087 else
4088 label_false = create_artificial_label (UNKNOWN_LOCATION);
4090 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4091 &arm2);
4092 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4093 label_false);
4094 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4095 gimplify_seq_add_stmt (&seq, cond_stmt);
4096 gimple_stmt_iterator gsi = gsi_last (seq);
4097 maybe_fold_stmt (&gsi);
4099 label_cont = NULL_TREE;
4100 if (!have_then_clause_p)
4102 /* For if (...) {} else { code; } put label_true after
4103 the else block. */
4104 if (TREE_OPERAND (expr, 1) == NULL_TREE
4105 && !have_else_clause_p
4106 && TREE_OPERAND (expr, 2) != NULL_TREE)
4107 label_cont = label_true;
4108 else
4110 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4111 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4112 /* For if (...) { code; } else {} or
4113 if (...) { code; } else goto label; or
4114 if (...) { code; return; } else { ... }
4115 label_cont isn't needed. */
4116 if (!have_else_clause_p
4117 && TREE_OPERAND (expr, 2) != NULL_TREE
4118 && gimple_seq_may_fallthru (seq))
4120 gimple *g;
4121 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4123 g = gimple_build_goto (label_cont);
4125 /* GIMPLE_COND's are very low level; they have embedded
4126 gotos. This particular embedded goto should not be marked
4127 with the location of the original COND_EXPR, as it would
4128 correspond to the COND_EXPR's condition, not the ELSE or the
4129 THEN arms. To avoid marking it with the wrong location, flag
4130 it as "no location". */
4131 gimple_set_do_not_emit_location (g);
4133 gimplify_seq_add_stmt (&seq, g);
4137 if (!have_else_clause_p)
4139 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4140 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4142 if (label_cont)
4143 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4145 gimple_pop_condition (pre_p);
4146 gimple_seq_add_seq (pre_p, seq);
4148 if (ret == GS_ERROR)
4149 ; /* Do nothing. */
4150 else if (have_then_clause_p || have_else_clause_p)
4151 ret = GS_ALL_DONE;
4152 else
4154 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4155 expr = TREE_OPERAND (expr, 0);
4156 gimplify_stmt (&expr, pre_p);
4159 *expr_p = NULL;
4160 return ret;
4163 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4164 to be marked addressable.
4166 We cannot rely on such an expression being directly markable if a temporary
4167 has been created by the gimplification. In this case, we create another
4168 temporary and initialize it with a copy, which will become a store after we
4169 mark it addressable. This can happen if the front-end passed us something
4170 that it could not mark addressable yet, like a Fortran pass-by-reference
4171 parameter (int) floatvar. */
4173 static void
4174 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4176 while (handled_component_p (*expr_p))
4177 expr_p = &TREE_OPERAND (*expr_p, 0);
4178 if (is_gimple_reg (*expr_p))
4180 /* Do not allow an SSA name as the temporary. */
4181 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4182 DECL_GIMPLE_REG_P (var) = 0;
4183 *expr_p = var;
4187 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4188 a call to __builtin_memcpy. */
4190 static enum gimplify_status
4191 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4192 gimple_seq *seq_p)
4194 tree t, to, to_ptr, from, from_ptr;
4195 gcall *gs;
4196 location_t loc = EXPR_LOCATION (*expr_p);
4198 to = TREE_OPERAND (*expr_p, 0);
4199 from = TREE_OPERAND (*expr_p, 1);
4201 /* Mark the RHS addressable. Beware that it may not be possible to do so
4202 directly if a temporary has been created by the gimplification. */
4203 prepare_gimple_addressable (&from, seq_p);
4205 mark_addressable (from);
4206 from_ptr = build_fold_addr_expr_loc (loc, from);
4207 gimplify_arg (&from_ptr, seq_p, loc);
4209 mark_addressable (to);
4210 to_ptr = build_fold_addr_expr_loc (loc, to);
4211 gimplify_arg (&to_ptr, seq_p, loc);
4213 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4215 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4217 if (want_value)
4219 /* tmp = memcpy() */
4220 t = create_tmp_var (TREE_TYPE (to_ptr));
4221 gimple_call_set_lhs (gs, t);
4222 gimplify_seq_add_stmt (seq_p, gs);
4224 *expr_p = build_simple_mem_ref (t);
4225 return GS_ALL_DONE;
4228 gimplify_seq_add_stmt (seq_p, gs);
4229 *expr_p = NULL;
4230 return GS_ALL_DONE;
4233 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4234 a call to __builtin_memset. In this case we know that the RHS is
4235 a CONSTRUCTOR with an empty element list. */
4237 static enum gimplify_status
4238 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4239 gimple_seq *seq_p)
4241 tree t, from, to, to_ptr;
4242 gcall *gs;
4243 location_t loc = EXPR_LOCATION (*expr_p);
4245 /* Assert our assumptions, to abort instead of producing wrong code
4246 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4247 not be immediately exposed. */
4248 from = TREE_OPERAND (*expr_p, 1);
4249 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4250 from = TREE_OPERAND (from, 0);
4252 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4253 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4255 /* Now proceed. */
4256 to = TREE_OPERAND (*expr_p, 0);
4258 to_ptr = build_fold_addr_expr_loc (loc, to);
4259 gimplify_arg (&to_ptr, seq_p, loc);
4260 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4262 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4264 if (want_value)
4266 /* tmp = memset() */
4267 t = create_tmp_var (TREE_TYPE (to_ptr));
4268 gimple_call_set_lhs (gs, t);
4269 gimplify_seq_add_stmt (seq_p, gs);
4271 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4272 return GS_ALL_DONE;
4275 gimplify_seq_add_stmt (seq_p, gs);
4276 *expr_p = NULL;
4277 return GS_ALL_DONE;
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context passed through walk_tree to gimplify_init_ctor_preeval_1,
   describing the assignment's left-hand side.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
/* walk_tree callback: *TP is the constructor subtree being examined,
   XDATA is a gimplify_init_ctor_preeval_data describing the lhs.
   Returning non-null stops the walk and signals a potential overlap.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer parameter of the callee for a possible
	 aliasing conflict with the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls contain no references; no need to walk into them.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   EXPR_P points to one constructor element value; statements produced
   by gimplification go to PRE_P/POST_P.  On gimplification error the
   value is replaced by NULL (skipped later by gimplify_init_ctor_eval).  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4483 /* Return true if FDECL is accessing a field that is zero sized. */
4485 static bool
4486 zero_sized_field_decl (const_tree fdecl)
4488 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4489 && integer_zerop (DECL_SIZE (fdecl)))
4490 return true;
4491 return false;
4494 /* Return true if TYPE is zero sized. */
4496 static bool
4497 zero_sized_type (const_tree type)
4499 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4500 && integer_zerop (TYPE_SIZE (type)))
4501 return true;
4502 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was zeroed first, stores of zero are
	 redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested aggregate constructors (except vectors, which stay as
	 CONSTRUCTORs through gimple) are lowered recursively.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4594 /* Return the appropriate RHS predicate for this LHS. */
4596 gimple_predicate
4597 rhs_predicate_for (tree lhs)
4599 if (is_gimple_reg (lhs))
4600 return is_gimple_reg_rhs_or_call;
4601 else
4602 return is_gimple_mem_rhs_or_call;
4605 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4606 before the LHS has been gimplified. */
4608 static gimple_predicate
4609 initial_rhs_predicate_for (tree lhs)
4611 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4612 return is_gimple_reg_rhs_or_call;
4613 else
4614 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   EXPR_P points at the COMPOUND_LITERAL_EXPR; its DECL_EXPR is added to
   PRE_P.  GIMPLE_TEST_F and FALLBACK describe the context's constraints,
   which decide whether the literal's initializer can be substituted
   directly.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  ORIG_CTOR itself is
   never modified: the copy-on-write below clones it (and its element
   vector) the first time a replacement is needed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	/* Recurse into nested constructors.  */
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only replace a literal whose address is never taken by its
	     constant initializer, recursively optimized.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: clone the constructor before the first change so
	 callers holding ORIG_CTOR are unaffected.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */

	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper; the rhs-only restriction above is a constraint on
     callers, the folding itself is shared with the general helper.  */
  return gimple_fold_indirect_ref (t);
}
5094 /* Subroutine of gimplify_modify_expr to do simplifications of
5095 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5096 something changes. */
5098 static enum gimplify_status
5099 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5100 gimple_seq *pre_p, gimple_seq *post_p,
5101 bool want_value)
5103 enum gimplify_status ret = GS_UNHANDLED;
5104 bool changed;
5108 changed = false;
5109 switch (TREE_CODE (*from_p))
5111 case VAR_DECL:
5112 /* If we're assigning from a read-only variable initialized with
5113 a constructor, do the direct assignment from the constructor,
5114 but only if neither source nor target are volatile since this
5115 latter assignment might end up being done on a per-field basis. */
5116 if (DECL_INITIAL (*from_p)
5117 && TREE_READONLY (*from_p)
5118 && !TREE_THIS_VOLATILE (*from_p)
5119 && !TREE_THIS_VOLATILE (*to_p)
5120 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5122 tree old_from = *from_p;
5123 enum gimplify_status subret;
5125 /* Move the constructor into the RHS. */
5126 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5128 /* Let's see if gimplify_init_constructor will need to put
5129 it in memory. */
5130 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5131 false, true);
5132 if (subret == GS_ERROR)
5134 /* If so, revert the change. */
5135 *from_p = old_from;
5137 else
5139 ret = GS_OK;
5140 changed = true;
5143 break;
5144 case INDIRECT_REF:
5146 /* If we have code like
5148 *(const A*)(A*)&x
5150 where the type of "x" is a (possibly cv-qualified variant
5151 of "A"), treat the entire expression as identical to "x".
5152 This kind of code arises in C++ when an object is bound
5153 to a const reference, and if "x" is a TARGET_EXPR we want
5154 to take advantage of the optimization below. */
5155 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5156 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5157 if (t)
5159 if (TREE_THIS_VOLATILE (t) != volatile_p)
5161 if (DECL_P (t))
5162 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5163 build_fold_addr_expr (t));
5164 if (REFERENCE_CLASS_P (t))
5165 TREE_THIS_VOLATILE (t) = volatile_p;
5167 *from_p = t;
5168 ret = GS_OK;
5169 changed = true;
5171 break;
5174 case TARGET_EXPR:
5176 /* If we are initializing something from a TARGET_EXPR, strip the
5177 TARGET_EXPR and initialize it directly, if possible. This can't
5178 be done if the initializer is void, since that implies that the
5179 temporary is set in some non-trivial way.
5181 ??? What about code that pulls out the temp and uses it
5182 elsewhere? I think that such code never uses the TARGET_EXPR as
5183 an initializer. If I'm wrong, we'll die because the temp won't
5184 have any RTL. In that case, I guess we'll need to replace
5185 references somehow. */
5186 tree init = TARGET_EXPR_INITIAL (*from_p);
5188 if (init
5189 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5190 || !TARGET_EXPR_NO_ELIDE (*from_p))
5191 && !VOID_TYPE_P (TREE_TYPE (init)))
5193 *from_p = init;
5194 ret = GS_OK;
5195 changed = true;
5198 break;
5200 case COMPOUND_EXPR:
5201 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5202 caught. */
5203 gimplify_compound_expr (from_p, pre_p, true);
5204 ret = GS_OK;
5205 changed = true;
5206 break;
5208 case CONSTRUCTOR:
5209 /* If we already made some changes, let the front end have a
5210 crack at this before we break it down. */
5211 if (ret != GS_UNHANDLED)
5212 break;
5213 /* If we're initializing from a CONSTRUCTOR, break this into
5214 individual MODIFY_EXPRs. */
5215 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5216 false);
5218 case COND_EXPR:
5219 /* If we're assigning to a non-register type, push the assignment
5220 down into the branches. This is mandatory for ADDRESSABLE types,
5221 since we cannot generate temporaries for such, but it saves a
5222 copy in other cases as well. */
5223 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5225 /* This code should mirror the code in gimplify_cond_expr. */
5226 enum tree_code code = TREE_CODE (*expr_p);
5227 tree cond = *from_p;
5228 tree result = *to_p;
5230 ret = gimplify_expr (&result, pre_p, post_p,
5231 is_gimple_lvalue, fb_lvalue);
5232 if (ret != GS_ERROR)
5233 ret = GS_OK;
5235 /* If we are going to write RESULT more than once, clear
5236 TREE_READONLY flag, otherwise we might incorrectly promote
5237 the variable to static const and initialize it at compile
5238 time in one of the branches. */
5239 if (VAR_P (result)
5240 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5241 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5242 TREE_READONLY (result) = 0;
5243 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5244 TREE_OPERAND (cond, 1)
5245 = build2 (code, void_type_node, result,
5246 TREE_OPERAND (cond, 1));
5247 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5248 TREE_OPERAND (cond, 2)
5249 = build2 (code, void_type_node, unshare_expr (result),
5250 TREE_OPERAND (cond, 2));
5252 TREE_TYPE (cond) = void_type_node;
5253 recalculate_side_effects (cond);
5255 if (want_value)
5257 gimplify_and_add (cond, pre_p);
5258 *expr_p = unshare_expr (result);
5260 else
5261 *expr_p = cond;
5262 return ret;
5264 break;
5266 case CALL_EXPR:
5267 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5268 return slot so that we don't generate a temporary. */
5269 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5270 && aggregate_value_p (*from_p, *from_p))
5272 bool use_target;
5274 if (!(rhs_predicate_for (*to_p))(*from_p))
5275 /* If we need a temporary, *to_p isn't accurate. */
5276 use_target = false;
5277 /* It's OK to use the return slot directly unless it's an NRV. */
5278 else if (TREE_CODE (*to_p) == RESULT_DECL
5279 && DECL_NAME (*to_p) == NULL_TREE
5280 && needs_to_live_in_memory (*to_p))
5281 use_target = true;
5282 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5283 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5284 /* Don't force regs into memory. */
5285 use_target = false;
5286 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5287 /* It's OK to use the target directly if it's being
5288 initialized. */
5289 use_target = true;
5290 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5291 != INTEGER_CST)
5292 /* Always use the target and thus RSO for variable-sized types.
5293 GIMPLE cannot deal with a variable-sized assignment
5294 embedded in a call statement. */
5295 use_target = true;
5296 else if (TREE_CODE (*to_p) != SSA_NAME
5297 && (!is_gimple_variable (*to_p)
5298 || needs_to_live_in_memory (*to_p)))
5299 /* Don't use the original target if it's already addressable;
5300 if its address escapes, and the called function uses the
5301 NRV optimization, a conforming program could see *to_p
5302 change before the called function returns; see c++/19317.
5303 When optimizing, the return_slot pass marks more functions
5304 as safe after we have escape info. */
5305 use_target = false;
5306 else
5307 use_target = true;
5309 if (use_target)
5311 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5312 mark_addressable (*to_p);
5315 break;
5317 case WITH_SIZE_EXPR:
5318 /* Likewise for calls that return an aggregate of non-constant size,
5319 since we would not be able to generate a temporary at all. */
5320 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5322 *from_p = TREE_OPERAND (*from_p, 0);
5323 /* We don't change ret in this case because the
5324 WITH_SIZE_EXPR might have been added in
5325 gimplify_modify_expr, so returning GS_OK would lead to an
5326 infinite loop. */
5327 changed = true;
5329 break;
5331 /* If we're initializing from a container, push the initialization
5332 inside it. */
5333 case CLEANUP_POINT_EXPR:
5334 case BIND_EXPR:
5335 case STATEMENT_LIST:
5337 tree wrap = *from_p;
5338 tree t;
5340 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5341 fb_lvalue);
5342 if (ret != GS_ERROR)
5343 ret = GS_OK;
5345 t = voidify_wrapper_expr (wrap, *expr_p);
5346 gcc_assert (t == *expr_p);
5348 if (want_value)
5350 gimplify_and_add (wrap, pre_p);
5351 *expr_p = unshare_expr (*to_p);
5353 else
5354 *expr_p = wrap;
5355 return GS_OK;
5358 case COMPOUND_LITERAL_EXPR:
5360 tree complit = TREE_OPERAND (*expr_p, 1);
5361 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5362 tree decl = DECL_EXPR_DECL (decl_s);
5363 tree init = DECL_INITIAL (decl);
5365 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5366 into struct T x = { 0, 1, 2 } if the address of the
5367 compound literal has never been taken. */
5368 if (!TREE_ADDRESSABLE (complit)
5369 && !TREE_ADDRESSABLE (decl)
5370 && init)
5372 *expr_p = copy_node (*expr_p);
5373 TREE_OPERAND (*expr_p, 1) = init;
5374 return GS_OK;
5378 default:
5379 break;
5382 while (changed);
5384 return ret;
5388 /* Return true if T looks like a valid GIMPLE statement. */
5390 static bool
5391 is_gimple_stmt (tree t)
5393 const enum tree_code code = TREE_CODE (t);
5395 switch (code)
5397 case NOP_EXPR:
5398 /* The only valid NOP_EXPR is the empty statement. */
5399 return IS_EMPTY_STMT (t);
5401 case BIND_EXPR:
5402 case COND_EXPR:
5403 /* These are only valid if they're void. */
5404 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5406 case SWITCH_EXPR:
5407 case GOTO_EXPR:
5408 case RETURN_EXPR:
5409 case LABEL_EXPR:
5410 case CASE_LABEL_EXPR:
5411 case TRY_CATCH_EXPR:
5412 case TRY_FINALLY_EXPR:
5413 case EH_FILTER_EXPR:
5414 case CATCH_EXPR:
5415 case ASM_EXPR:
5416 case STATEMENT_LIST:
5417 case OACC_PARALLEL:
5418 case OACC_KERNELS:
5419 case OACC_DATA:
5420 case OACC_HOST_DATA:
5421 case OACC_DECLARE:
5422 case OACC_UPDATE:
5423 case OACC_ENTER_DATA:
5424 case OACC_EXIT_DATA:
5425 case OACC_CACHE:
5426 case OMP_PARALLEL:
5427 case OMP_FOR:
5428 case OMP_SIMD:
5429 case OMP_DISTRIBUTE:
5430 case OACC_LOOP:
5431 case OMP_SECTIONS:
5432 case OMP_SECTION:
5433 case OMP_SINGLE:
5434 case OMP_MASTER:
5435 case OMP_TASKGROUP:
5436 case OMP_ORDERED:
5437 case OMP_CRITICAL:
5438 case OMP_TASK:
5439 case OMP_TARGET:
5440 case OMP_TARGET_DATA:
5441 case OMP_TARGET_UPDATE:
5442 case OMP_TARGET_ENTER_DATA:
5443 case OMP_TARGET_EXIT_DATA:
5444 case OMP_TASKLOOP:
5445 case OMP_TEAMS:
5446 /* These are always void. */
5447 return true;
5449 case CALL_EXPR:
5450 case MODIFY_EXPR:
5451 case PREDICT_EXPR:
5452 /* These are valid regardless of their type. */
5453 return true;
5455 default:
5456 return false;
5461 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5462 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5463 DECL_GIMPLE_REG_P set.
5465 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5466 other, unmodified part of the complex object just before the total store.
5467 As a consequence, if the object is still uninitialized, an undefined value
5468 will be loaded into a register, which may result in a spurious exception
5469 if the register is floating-point and the value happens to be a signaling
5470 NaN for example. Then the fully-fledged complex operations lowering pass
5471 followed by a DCE pass are necessary in order to fix things up. */
5473 static enum gimplify_status
5474 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5475 bool want_value)
5477 enum tree_code code, ocode;
5478 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5480 lhs = TREE_OPERAND (*expr_p, 0);
5481 rhs = TREE_OPERAND (*expr_p, 1);
5482 code = TREE_CODE (lhs);
5483 lhs = TREE_OPERAND (lhs, 0);
5485 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5486 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5487 TREE_NO_WARNING (other) = 1;
5488 other = get_formal_tmp_var (other, pre_p);
5490 realpart = code == REALPART_EXPR ? rhs : other;
5491 imagpart = code == REALPART_EXPR ? other : rhs;
5493 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5494 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5495 else
5496 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5498 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5499 *expr_p = (want_value) ? rhs : NULL_TREE;
5501 return GS_ALL_DONE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      /* A clobber needs a VAR_DECL or MEM_REF destination; for anything
	 else take the address into a temporary and clobber through it.  */
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  /* Rebuild the IFN_VA_ARG call with the size appended as an
	     extra argument.  */
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* A volatile LHS must not be re-read to produce the wanted value, so
     snapshot the RHS into a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* Lower three-argument __builtin_expect to the internal function
	     form so later passes can use the hint.  */
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile LHS hand back the saved RHS temporary rather than
	 re-reading the LHS.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5797 /* Gimplify a comparison between two variable-sized objects. Do this
5798 with a call to BUILT_IN_MEMCMP. */
5800 static enum gimplify_status
5801 gimplify_variable_sized_compare (tree *expr_p)
5803 location_t loc = EXPR_LOCATION (*expr_p);
5804 tree op0 = TREE_OPERAND (*expr_p, 0);
5805 tree op1 = TREE_OPERAND (*expr_p, 1);
5806 tree t, arg, dest, src, expr;
5808 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5809 arg = unshare_expr (arg);
5810 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5811 src = build_fold_addr_expr_loc (loc, op1);
5812 dest = build_fold_addr_expr_loc (loc, op0);
5813 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5814 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5816 expr
5817 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5818 SET_EXPR_LOCATION (expr, loc);
5819 *expr_p = expr;
5821 return GS_OK;
5824 /* Gimplify a comparison between two aggregate objects of integral scalar
5825 mode as a comparison between the bitwise equivalent scalar values. */
5827 static enum gimplify_status
5828 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5830 location_t loc = EXPR_LOCATION (*expr_p);
5831 tree op0 = TREE_OPERAND (*expr_p, 0);
5832 tree op1 = TREE_OPERAND (*expr_p, 1);
5834 tree type = TREE_TYPE (op0);
5835 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5837 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5838 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5840 *expr_p
5841 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5843 return GS_OK;
5846 /* Gimplify an expression sequence. This function gimplifies each
5847 expression and rewrites the original expression with the last
5848 expression of the sequence in GIMPLE form.
5850 PRE_P points to the list where the side effects for all the
5851 expressions in the sequence will be emitted.
5853 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5855 static enum gimplify_status
5856 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5858 tree t = *expr_p;
5862 tree *sub_p = &TREE_OPERAND (t, 0);
5864 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5865 gimplify_compound_expr (sub_p, pre_p, false);
5866 else
5867 gimplify_stmt (sub_p, pre_p);
5869 t = TREE_OPERAND (t, 1);
5871 while (TREE_CODE (t) == COMPOUND_EXPR);
5873 *expr_p = t;
5874 if (want_value)
5875 return GS_OK;
5876 else
5878 gimplify_stmt (expr_p, pre_p);
5879 return GS_ALL_DONE;
5883 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5884 gimplify. After gimplification, EXPR_P will point to a new temporary
5885 that holds the original value of the SAVE_EXPR node.
5887 PRE_P points to the list where side effects that must happen before
5888 *EXPR_P should be stored. */
5890 static enum gimplify_status
5891 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5893 enum gimplify_status ret = GS_ALL_DONE;
5894 tree val;
5896 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5897 val = TREE_OPERAND (*expr_p, 0);
5899 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5900 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5902 /* The operand may be a void-valued expression. It is
5903 being executed only for its side-effects. */
5904 if (TREE_TYPE (val) == void_type_node)
5906 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5907 is_gimple_stmt, fb_none);
5908 val = NULL;
5910 else
5911 /* The temporary may not be an SSA name as later abnormal and EH
5912 control flow may invalidate use/def domination. When in SSA
5913 form then assume there are no such issues and SAVE_EXPRs only
5914 appear via GENERIC foldings. */
5915 val = get_initialized_tmp_var (val, pre_p, post_p,
5916 gimple_in_ssa_p (cfun));
5918 TREE_OPERAND (*expr_p, 0) = val;
5919 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5922 *expr_p = val;
5924 return ret;
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* Re-add the qualification conversion when the pointed-to types
	   no longer convert uselessly.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* &MEM[p, 0] is equivalent to &*p, so share the INDIRECT_REF
	 handling; a nonzero offset needs the generic path.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify every output operand to an lvalue, recording
     each output constraint in OCONSTRAINTS for the input pass below.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
         it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      /* Route the operand through a register temporary: copy in
		 before the asm for in/out operands, copy back after it.  */
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First walk: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second walk: emit each alternative, substituting the
		     operand number for those that allow a register.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  /* Second pass: gimplify the input operands (including any inputs added
     above when splitting in/out operands).  */
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification; just collect them.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
6352 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6353 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6354 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6355 return to this function.
6357 FIXME should we complexify the prequeue handling instead? Or use flags
6358 for all the cleanups and let the optimizer tighten them up? The current
6359 code seems pretty fragile; it will break on a cleanup within any
6360 non-conditional nesting. But any such nesting would be broken, anyway;
6361 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6362 and continues out of it. We can do that at the RTL level, though, so
6363 having an optimizer to tighten up try/finally regions would be a Good
6364 Thing. */
6366 static enum gimplify_status
6367 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6369 gimple_stmt_iterator iter;
6370 gimple_seq body_sequence = NULL;
/* If the wrapped expression yields a value, voidify_wrapper_expr
   rewrites it to store into a temporary; TEMP is that temporary,
   or NULL_TREE when the expression is already void.  */
6372 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6374 /* We only care about the number of conditions between the innermost
6375 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6376 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6377 int old_conds = gimplify_ctxp->conditions;
6378 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6379 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6380 gimplify_ctxp->conditions = 0;
6381 gimplify_ctxp->conditional_cleanups = NULL;
6382 gimplify_ctxp->in_cleanup_point_expr = true;
/* Gimplify the body into a private sequence; any cleanups emitted
   while doing so appear in it as GIMPLE_WITH_CLEANUP_EXPR markers.  */
6384 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6386 gimplify_ctxp->conditions = old_conds;
6387 gimplify_ctxp->conditional_cleanups = old_cleanups;
6388 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Walk the sequence and convert each WCE marker into a GIMPLE_TRY
   whose cleanup guards everything that follows the marker.  */
6390 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6392 gimple *wce = gsi_stmt (iter);
6394 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6396 if (gsi_one_before_end_p (iter))
/* The WCE is the last statement: nothing follows that the cleanup
   could guard, so (unless it is EH-only) just run the cleanup
   statements in place of the marker.  */
6398 /* Note that gsi_insert_seq_before and gsi_remove do not
6399 scan operands, unlike some other sequence mutators. */
6400 if (!gimple_wce_cleanup_eh_only (wce))
6401 gsi_insert_seq_before_without_update (&iter,
6402 gimple_wce_cleanup (wce),
6403 GSI_SAME_STMT);
6404 gsi_remove (&iter, true);
6405 break;
6407 else
6409 gtry *gtry;
6410 gimple_seq seq;
6411 enum gimple_try_flags kind;
/* An EH-only cleanup becomes TRY_CATCH (runs only on exception),
   otherwise TRY_FINALLY (runs on every exit path).  */
6413 if (gimple_wce_cleanup_eh_only (wce))
6414 kind = GIMPLE_TRY_CATCH;
6415 else
6416 kind = GIMPLE_TRY_FINALLY;
6417 seq = gsi_split_seq_after (iter);
6419 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6420 /* Do not use gsi_replace here, as it may scan operands.
6421 We want to do a simple structural modification only. */
6422 gsi_set_stmt (&iter, gtry);
/* Continue scanning inside the new try body so nested WCEs are
   converted too.  */
6423 iter = gsi_start (gtry->eval);
6426 else
6427 gsi_next (&iter);
6430 gimplify_seq_add_seq (pre_p, body_sequence);
/* Return the value temporary (if any) as the replacement expression.  */
6431 if (temp)
6433 *expr_p = temp;
6434 return GS_OK;
6436 else
6438 *expr_p = NULL;
6439 return GS_ALL_DONE;
6443 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6444 is the cleanup action required. EH_ONLY is true if the cleanup should
6445 only be executed if an exception is thrown, not on normal exit.
6446 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6447 only valid for clobbers. */
6449 static void
6450 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6451 bool force_uncond = false)
6453 gimple *wce;
6454 gimple_seq cleanup_stmts = NULL;
6456 /* Errors can result in improperly nested cleanups. Which results in
6457 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6458 if (seen_error ())
6459 return;
6461 if (gimple_conditional_context ())
6463 /* If we're in a conditional context, this is more complex. We only
6464 want to run the cleanup if we actually ran the initialization that
6465 necessitates it, but we want to run it after the end of the
6466 conditional context. So we wrap the try/finally around the
6467 condition and use a flag to determine whether or not to actually
6468 run the destructor. Thus
6470 test ? f(A()) : 0
6472 becomes (approximately)
6474 flag = 0;
6475 try {
6476 if (test) { A::A(temp); flag = 1; val = f(temp); }
6477 else { val = 0; }
6478 } finally {
6479 if (flag) A::~A(temp);
/* FORCE_UNCOND (valid only for clobbers, per the function comment)
   skips the flag protocol: the cleanup runs unconditionally at the
   end of the conditional context.  */
6483 if (force_uncond)
6485 gimplify_stmt (&cleanup, &cleanup_stmts);
6486 wce = gimple_build_wce (cleanup_stmts);
6487 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6489 else
/* Flag protocol: FLAG starts false, is set true in *PRE_P right
   where initialization happens, and gates the cleanup via a
   COND_EXPR inside the WCE.  */
6491 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6492 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6493 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6495 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6496 gimplify_stmt (&cleanup, &cleanup_stmts);
6497 wce = gimple_build_wce (cleanup_stmts);
6499 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6500 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6501 gimplify_seq_add_stmt (pre_p, ftrue);
6503 /* Because of this manipulation, and the EH edges that jump
6504 threading cannot redirect, the temporary (VAR) will appear
6505 to be used uninitialized. Don't warn. */
6506 TREE_NO_WARNING (var) = 1;
6509 else
/* Unconditional context: emit the WCE marker directly into *PRE_P;
   gimplify_cleanup_point_expr later turns it into a GIMPLE_TRY.  */
6511 gimplify_stmt (&cleanup, &cleanup_stmts);
6512 wce = gimple_build_wce (cleanup_stmts);
6513 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6514 gimplify_seq_add_stmt (pre_p, wce);
6518 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6520 static enum gimplify_status
6521 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6523 tree targ = *expr_p;
6524 tree temp = TARGET_EXPR_SLOT (targ);
6525 tree init = TARGET_EXPR_INITIAL (targ);
6526 enum gimplify_status ret;
6528 bool unpoison_empty_seq = false;
6529 gimple_stmt_iterator unpoison_it;
6531 if (init)
6533 tree cleanup = NULL_TREE;
6535 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6536 to the temps list. Handle also variable length TARGET_EXPRs. */
6537 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6539 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6540 gimplify_type_sizes (TREE_TYPE (temp), pre_p)
6541 gimplify_vla_decl (temp, pre_p);
6543 else
6545 /* Save location where we need to place unpoisoning. It's possible
6546 that a variable will be converted to needs_to_live_in_memory. */
6547 unpoison_it = gsi_last (*pre_p);
6548 unpoison_empty_seq = gsi_end_p (unpoison_it);
6550 gimple_add_tmp_var (temp);
6553 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6554 expression is supposed to initialize the slot. */
6555 if (VOID_TYPE_P (TREE_TYPE (init)))
6556 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6557 else
/* Non-void initializer: wrap it in TEMP = INIT and gimplify that.
   The INIT_EXPR node itself is dead afterwards, so free it.  */
6559 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6560 init = init_expr;
6561 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6562 init = NULL;
6563 ggc_free (init_expr);
6565 if (ret == GS_ERROR)
6567 /* PR c++/28266 Make sure this is expanded only once. */
6568 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6569 return GS_ERROR;
6571 if (init)
6572 gimplify_and_add (init, pre_p);
6574 /* If needed, push the cleanup for the temp. */
6575 if (TARGET_EXPR_CLEANUP (targ))
/* EH-only cleanups go out immediately; a normal cleanup is deferred
   so it is pushed AFTER the stack-reuse clobber and asan poison
   cleanups below (cleanups run in reverse push order).  */
6577 if (CLEANUP_EH_ONLY (targ))
6578 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6579 CLEANUP_EH_ONLY (targ), pre_p);
6580 else
6581 cleanup = TARGET_EXPR_CLEANUP (targ);
6584 /* Add a clobber for the temporary going out of scope, like
6585 gimplify_bind_expr. */
6586 if (gimplify_ctxp->in_cleanup_point_expr
6587 && needs_to_live_in_memory (temp))
6589 if (flag_stack_reuse == SR_ALL)
6591 tree clobber = build_clobber (TREE_TYPE (temp));
6592 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
/* force_uncond=true: clobbers must be emitted unconditionally even
   inside a conditional context.  */
6593 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6595 if (asan_poisoned_variables
6596 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6597 && dbg_cnt (asan_use_after_scope)
6598 && !gimplify_omp_ctxp)
6600 tree asan_cleanup = build_asan_poison_call_expr (temp);
6601 if (asan_cleanup)
/* Unpoison at the saved pre-initialization spot, poison on scope
   exit via the cleanup mechanism.  */
6603 if (unpoison_empty_seq)
6604 unpoison_it = gsi_start (*pre_p);
6606 asan_poison_variable (temp, false, &unpoison_it,
6607 unpoison_empty_seq);
6608 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6612 if (cleanup)
6613 gimple_push_cleanup (temp, cleanup, false, pre_p);
6615 /* Only expand this once. */
6616 TREE_OPERAND (targ, 3) = init;
6617 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6619 else
6620 /* We should have expanded this before. */
6621 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
/* The TARGET_EXPR is replaced by its slot variable.  */
6623 *expr_p = temp;
6624 return GS_OK;
6627 /* Gimplification of expression trees. */
6629 /* Gimplify an expression which appears at statement context. The
6630 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6631 NULL, a new sequence is allocated.
6633 Return true if we actually added a statement to the queue. */
6635 bool
6636 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6638 gimple_seq_node last;
6640 last = gimple_seq_last (*seq_p);
6641 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6642 return last != gimple_seq_last (*seq_p);
6645 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6646 to CTX. If entries already exist, force them to be some flavor of private.
6647 If there is no enclosing parallel, do nothing. */
6649 void
6650 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6652 splay_tree_node n;
6654 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6655 return;
/* Walk outward through the enclosing OMP contexts (do/while over
   ctx->outer_context below).  */
6659 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6660 if (n != NULL)
/* DECL already has an entry here: demote SHARED to FIRSTPRIVATE,
   restrict a MAP to to-only; any other existing class means this
   context already privatizes it, so stop walking.  */
6662 if (n->value & GOVD_SHARED)
6663 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6664 else if (n->value & GOVD_MAP)
6665 n->value |= GOVD_MAP_TO_ONLY;
6666 else
6667 return;
6669 else if ((ctx->region_type & ORT_TARGET) != 0)
/* In a target region, honor the defaultmap for scalars.  */
6671 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6672 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6673 else
6674 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
/* Only data-sharing regions (parallel/task/teams) get an implicit
   firstprivate entry; worksharing, taskgroup, simd, acc and
   target-data regions are skipped.  */
6676 else if (ctx->region_type != ORT_WORKSHARE
6677 && ctx->region_type != ORT_TASKGROUP
6678 && ctx->region_type != ORT_SIMD
6679 && ctx->region_type != ORT_ACC
6680 && !(ctx->region_type & ORT_TARGET_DATA))
6681 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6683 ctx = ctx->outer_context;
6685 while (ctx);
6688 /* Similarly for each of the type sizes of TYPE. */
6690 static void
6691 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6693 if (type == NULL || type == error_mark_node)
6694 return;
6695 type = TYPE_MAIN_VARIANT (type);
/* privatized_types memoizes which types were already processed, so
   recursive type graphs terminate.  */
6697 if (ctx->privatized_types->add (type))
6698 return;
6700 switch (TREE_CODE (type))
6702 case INTEGER_TYPE:
6703 case ENUMERAL_TYPE:
6704 case BOOLEAN_TYPE:
6705 case REAL_TYPE:
6706 case FIXED_POINT_TYPE:
/* Range bounds of scalar types may be variable (e.g. VLA domains).  */
6707 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6708 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6709 break;
6711 case ARRAY_TYPE:
/* Recurse into both the element type and the index domain.  */
6712 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6713 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6714 break;
6716 case RECORD_TYPE:
6717 case UNION_TYPE:
6718 case QUAL_UNION_TYPE:
6720 tree field;
6721 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6722 if (TREE_CODE (field) == FIELD_DECL)
/* Field offsets can be variable in variable-sized records.  */
6724 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6725 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6728 break;
6730 case POINTER_TYPE:
6731 case REFERENCE_TYPE:
6732 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6733 break;
6735 default:
6736 break;
/* Finally handle the type's own size expressions and give the
   front end a chance at language-specific size fields.  */
6739 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6740 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6741 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6744 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6746 static void
6747 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6749 splay_tree_node n;
6750 unsigned int nflags;
6751 tree t;
6753 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6754 return;
6756 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6757 there are constructors involved somewhere. Exception is a shared clause,
6758 there is nothing privatized in that case. */
6759 if ((flags & GOVD_SHARED) == 0
6760 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6761 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6762 flags |= GOVD_SEEN;
6764 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6765 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6767 /* We shouldn't be re-adding the decl with the same data
6768 sharing class. */
6769 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6770 nflags = n->value | flags;
6771 /* The only combination of data sharing classes we should see is
6772 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6773 reduction variables to be used in data sharing clauses. */
6774 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6775 || ((nflags & GOVD_DATA_SHARE_CLASS)
6776 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6777 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6778 n->value = nflags;
6779 return;
6782 /* When adding a variable-sized variable, we have to handle all sorts
6783 of additional bits of data: the pointer replacement variable, and
6784 the parameters of the type. */
6785 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6787 /* Add the pointer replacement variable as PRIVATE if the variable
6788 replacement is private, else FIRSTPRIVATE since we'll need the
6789 address of the original variable either for SHARED, or for the
6790 copy into or out of the context. */
6791 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6793 if (flags & GOVD_MAP)
6794 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6795 else if (flags & GOVD_PRIVATE)
6796 nflags = GOVD_PRIVATE;
6797 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6798 && (flags & GOVD_FIRSTPRIVATE))
6799 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6800 else
6801 nflags = GOVD_FIRSTPRIVATE;
6802 nflags |= flags & GOVD_SEEN;
/* A variable-sized decl has a DECL_VALUE_EXPR of the form *ptr;
   register the pointer replacement variable itself.  */
6803 t = DECL_VALUE_EXPR (decl);
6804 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6805 t = TREE_OPERAND (t, 0);
6806 gcc_assert (DECL_P (t));
6807 omp_add_variable (ctx, t, nflags);
6810 /* Add all of the variable and type parameters (which should have
6811 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6812 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6813 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6814 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6816 /* The variable-sized variable itself is never SHARED, only some form
6817 of PRIVATE. The sharing would take place via the pointer variable
6818 which we remapped above. */
6819 if (flags & GOVD_SHARED)
6820 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6821 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6823 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6824 alloca statement we generate for the variable, so make sure it
6825 is available. This isn't automatically needed for the SHARED
6826 case, since we won't be allocating local storage then.
6827 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6828 in this case omp_notice_variable will be called later
6829 on when it is gimplified. */
6830 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6831 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6832 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6834 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6835 && lang_hooks.decls.omp_privatize_by_reference (decl))
6837 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6839 /* Similar to the direct variable sized case above, we'll need the
6840 size of references being privatized. */
6841 if ((flags & GOVD_SHARED) == 0)
6843 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6844 if (DECL_P (t))
6845 omp_notice_variable (ctx, t, true);
/* Merge into an existing classless entry, or insert a fresh one.  */
6849 if (n != NULL)
6850 n->value |= flags;
6851 else
6852 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6854 /* For reductions clauses in OpenACC loop directives, by default create a
6855 copy clause on the enclosing parallel construct for carrying back the
6856 results. */
6857 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6859 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6860 while (outer_ctx)
6862 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6863 if (n != NULL)
6865 /* Ignore local variables and explicitly declared clauses. */
6866 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6867 break;
6868 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6870 /* According to the OpenACC spec, such a reduction variable
6871 should already have a copy map on a kernels construct,
6872 verify that here. */
6873 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6874 && (n->value & GOVD_MAP));
6876 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6878 /* Remove firstprivate and make it a copy map. */
6879 n->value &= ~GOVD_FIRSTPRIVATE;
6880 n->value |= GOVD_MAP;
6883 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
/* No entry yet on the enclosing parallel: add the copy map there
   and stop walking further out.  */
6885 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6886 GOVD_MAP | GOVD_SEEN);
6887 break;
6889 outer_ctx = outer_ctx->outer_context;
6894 /* Notice a threadprivate variable DECL used in OMP context CTX.
6895 This just prints out diagnostics about threadprivate variable uses
6896 in untied tasks. If DECL2 is non-NULL, prevent this warning
6897 on that variable. */
6899 static bool
6900 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6901 tree decl2)
6903 splay_tree_node n;
6904 struct gimplify_omp_ctx *octx;
/* Diagnose uses inside any enclosing target region; inserting the
   decl into the context's map (value 0) suppresses duplicate
   diagnostics on later uses.  */
6906 for (octx = ctx; octx; octx = octx->outer_context)
6907 if ((octx->region_type & ORT_TARGET) != 0)
6909 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6910 if (n == NULL)
6912 error ("threadprivate variable %qE used in target region",
6913 DECL_NAME (decl));
6914 error_at (octx->location, "enclosing target region");
6915 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6917 if (decl2)
6918 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* Only untied tasks get the additional diagnostic below.  */
6921 if (ctx->region_type != ORT_UNTIED_TASK)
6922 return false;
6923 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6924 if (n == NULL)
6926 error ("threadprivate variable %qE used in untied task",
6927 DECL_NAME (decl));
6928 error_at (ctx->location, "enclosing task");
6929 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6931 if (decl2)
6932 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
/* Threadprivate variables are never remapped.  */
6933 return false;
6936 /* Return true if global var DECL is device resident. */
6938 static bool
6939 device_resident_p (tree decl)
6941 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6943 if (!attr)
6944 return false;
6946 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6948 tree c = TREE_VALUE (t);
6949 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6950 return true;
6953 return false;
6956 /* Return true if DECL has an ACC DECLARE attribute. */
6958 static bool
6959 is_oacc_declared (tree decl)
6961 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6962 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6963 return declared != NULL_TREE;
6966 /* Determine outer default flags for DECL mentioned in an OMP region
6967 but not declared in an enclosing clause.
6969 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6970 remapped firstprivate instead of shared. To some extent this is
6971 addressed in omp_firstprivatize_type_sizes, but not
6972 effectively. */
6974 static unsigned
6975 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6976 bool in_code, unsigned flags)
6978 enum omp_clause_default_kind default_kind = ctx->default_kind;
6979 enum omp_clause_default_kind kind;
/* Language-predetermined sharing overrides the region's default(...)
   clause.  */
6981 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6982 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6983 default_kind = kind;
6985 switch (default_kind)
6987 case OMP_CLAUSE_DEFAULT_NONE:
6989 const char *rtype;
/* default(none): diagnose the unlisted variable, naming the kind of
   enclosing region, then fall through and treat it as shared so
   compilation can continue.  */
6991 if (ctx->region_type & ORT_PARALLEL)
6992 rtype = "parallel";
6993 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
6994 rtype = "taskloop";
6995 else if (ctx->region_type & ORT_TASK)
6996 rtype = "task";
6997 else if (ctx->region_type & ORT_TEAMS)
6998 rtype = "teams";
6999 else
7000 gcc_unreachable ();
7002 error ("%qE not specified in enclosing %qs",
7003 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7004 error_at (ctx->location, "enclosing %qs", rtype);
7006 /* FALLTHRU */
7007 case OMP_CLAUSE_DEFAULT_SHARED:
7008 flags |= GOVD_SHARED;
7009 break;
7010 case OMP_CLAUSE_DEFAULT_PRIVATE:
7011 flags |= GOVD_PRIVATE;
7012 break;
7013 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7014 flags |= GOVD_FIRSTPRIVATE;
7015 break;
7016 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7017 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7018 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7019 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
/* Task regions: inspect outer contexts — a variable that is not
   shared somewhere outside becomes firstprivate; one shared by an
   enclosing parallel/teams stays shared.  */
7021 omp_notice_variable (octx, decl, in_code);
7022 for (; octx; octx = octx->outer_context)
7024 splay_tree_node n2;
7026 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7027 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7028 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7029 continue;
7030 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7032 flags |= GOVD_FIRSTPRIVATE;
7033 goto found_outer;
7035 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7037 flags |= GOVD_SHARED;
7038 goto found_outer;
/* No enclosing context decided: locals and parameters default to
   firstprivate, everything else to shared.  */
7043 if (TREE_CODE (decl) == PARM_DECL
7044 || (!is_global_var (decl)
7045 && DECL_CONTEXT (decl) == current_function_decl))
7046 flags |= GOVD_FIRSTPRIVATE;
7047 else
7048 flags |= GOVD_SHARED;
7049 found_outer:
7050 break;
7052 default:
7053 gcc_unreachable ();
7056 return flags;
7060 /* Determine outer default flags for DECL mentioned in an OACC region
7061 but not declared in an enclosing clause. */
7063 static unsigned
7064 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7066 const char *rkind;
7067 bool on_device = false;
7068 bool declared = is_oacc_declared (decl);
7069 tree type = TREE_TYPE (decl);
/* For by-reference privatization, classify by the referenced type.  */
7071 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7072 type = TREE_TYPE (type);
7074 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7075 && is_global_var (decl)
7076 && device_resident_p (decl))
7078 on_device = true;
7079 flags |= GOVD_MAP_TO_ONLY;
7082 switch (ctx->region_type)
7084 case ORT_ACC_KERNELS:
7085 rkind = "kernels";
7087 if (AGGREGATE_TYPE_P (type))
7089 /* Aggregates default to 'present_or_copy', or 'present'. */
7090 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7091 flags |= GOVD_MAP;
7092 else
7093 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7095 else
7096 /* Scalars default to 'copy'. */
7097 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7099 break;
7101 case ORT_ACC_PARALLEL:
7102 rkind = "parallel";
/* Device-resident or 'declare'd variables are simply mapped.  */
7104 if (on_device || declared)
7105 flags |= GOVD_MAP;
7106 else if (AGGREGATE_TYPE_P (type))
7108 /* Aggregates default to 'present_or_copy', or 'present'. */
7109 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7110 flags |= GOVD_MAP;
7111 else
7112 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7114 else
7115 /* Scalars default to 'firstprivate'. */
7116 flags |= GOVD_FIRSTPRIVATE;
7118 break;
7120 default:
7121 gcc_unreachable ();
/* default(none) diagnostics; RKIND names the construct chosen above.  */
7124 if (DECL_ARTIFICIAL (decl))
7125 ; /* We can get compiler-generated decls, and should not complain
7126 about them. */
7127 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7129 error ("%qE not specified in enclosing OpenACC %qs construct",
7130 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7131 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7133 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7134 ; /* Handled above. */
7135 else
7136 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7138 return flags;
7141 /* Record the fact that DECL was used within the OMP context CTX.
7142 IN_CODE is true when real code uses DECL, and false when we should
7143 merely emit default(none) errors. Return true if DECL is going to
7144 be remapped and thus DECL shouldn't be gimplified into its
7145 DECL_VALUE_EXPR (if any). */
7147 static bool
7148 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7150 splay_tree_node n;
7151 unsigned flags = in_code ? GOVD_SEEN : 0;
7152 bool ret = false, shared;
7154 if (error_operand_p (decl))
7155 return false;
7157 if (ctx->region_type == ORT_NONE)
7158 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7160 if (is_global_var (decl))
7162 /* Threadprivate variables are predetermined. */
7163 if (DECL_THREAD_LOCAL_P (decl))
7164 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7166 if (DECL_HAS_VALUE_EXPR_P (decl))
7168 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7170 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7171 return omp_notice_threadprivate_variable (ctx, decl, value);
/* Inside an OpenACC 'routine' function, global variables must carry
   a 'declare' directive (and must not use the 'link' clause).  */
7174 if (gimplify_omp_ctxp->outer_context == NULL
7175 && VAR_P (decl)
7176 && oacc_get_fn_attrib (current_function_decl))
7178 location_t loc = DECL_SOURCE_LOCATION (decl);
7180 if (lookup_attribute ("omp declare target link",
7181 DECL_ATTRIBUTES (decl)))
7183 error_at (loc,
7184 "%qE with %<link%> clause used in %<routine%> function",
7185 DECL_NAME (decl));
7186 return false;
7188 else if (!lookup_attribute ("omp declare target",
7189 DECL_ATTRIBUTES (decl)))
7191 error_at (loc,
7192 "%qE requires a %<declare%> directive for use "
7193 "in a %<routine%> function", DECL_NAME (decl));
7194 return false;
7199 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7200 if ((ctx->region_type & ORT_TARGET) != 0)
/* Target (and OpenACC compute) regions: a first use of DECL must be
   given an implicit mapping or firstprivate class here.  */
7202 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7203 if (n == NULL)
7205 unsigned nflags = flags;
7206 if ((ctx->region_type & ORT_ACC) == 0)
7208 bool is_declare_target = false;
/* 'declare target' globals not privatized anywhere outside need no
   implicit map at all.  */
7209 if (is_global_var (decl)
7210 && varpool_node::get_create (decl)->offloadable)
7212 struct gimplify_omp_ctx *octx;
7213 for (octx = ctx->outer_context;
7214 octx; octx = octx->outer_context)
7216 n = splay_tree_lookup (octx->variables,
7217 (splay_tree_key)decl);
7218 if (n
7219 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7220 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7221 break;
7223 is_declare_target = octx == NULL;
7225 if (!is_declare_target)
/* Pick the defaultmap category (pointer / scalar / aggregate) and
   apply the region's defaultmap setting.  */
7227 int gdmk;
7228 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7229 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7230 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7231 == POINTER_TYPE)))
7232 gdmk = GDMK_POINTER;
7233 else if (lang_hooks.decls.omp_scalar_p (decl))
7234 gdmk = GDMK_SCALAR;
7235 else
7236 gdmk = GDMK_AGGREGATE;
7237 if (ctx->defaultmap[gdmk] == 0)
7239 tree d = lang_hooks.decls.omp_report_decl (decl);
7240 error ("%qE not specified in enclosing %<target%>",
7241 DECL_NAME (d));
7242 error_at (ctx->location, "enclosing %<target%>");
7244 else if (ctx->defaultmap[gdmk]
7245 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7246 nflags |= ctx->defaultmap[gdmk];
7247 else
7249 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7250 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7255 struct gimplify_omp_ctx *octx = ctx->outer_context;
7256 if ((ctx->region_type & ORT_ACC) && octx)
7258 /* Look in outer OpenACC contexts, to see if there's a
7259 data attribute for this variable. */
7260 omp_notice_variable (octx, decl, in_code);
7262 for (; octx; octx = octx->outer_context)
7264 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7265 break;
7266 splay_tree_node n2
7267 = splay_tree_lookup (octx->variables,
7268 (splay_tree_key) decl);
7269 if (n2)
7271 if (octx->region_type == ORT_ACC_HOST_DATA)
7272 error ("variable %qE declared in enclosing "
7273 "%<host_data%> region", DECL_NAME (decl));
7274 nflags |= GOVD_MAP;
7275 if (octx->region_type == ORT_ACC_DATA
7276 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7277 nflags |= GOVD_MAP_0LEN_ARRAY;
7278 goto found_outer;
/* NFLAGS still undecided (only TO/FROM/ALLOC restrictions, if any):
   verify the type is mappable and apply the default class — the
   OpenACC default clause logic for ACC, plain map otherwise.  */
7283 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7284 | GOVD_MAP_ALLOC_ONLY)) == flags)
7286 tree type = TREE_TYPE (decl);
7288 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7289 && lang_hooks.decls.omp_privatize_by_reference (decl))
7290 type = TREE_TYPE (type);
7291 if (!lang_hooks.types.omp_mappable_type (type))
7293 error ("%qD referenced in target region does not have "
7294 "a mappable type", decl);
7295 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7297 else
7299 if ((ctx->region_type & ORT_ACC) != 0)
7300 nflags = oacc_default_clause (ctx, decl, flags);
7301 else
7302 nflags |= GOVD_MAP;
7305 found_outer:
7306 omp_add_variable (ctx, decl, nflags);
7308 else
7310 /* If nothing changed, there's nothing left to do. */
7311 if ((n->value & flags) == flags)
7312 return ret;
7313 flags |= n->value;
7314 n->value = flags;
7316 goto do_outer;
/* Non-target regions from here on.  */
7319 if (n == NULL)
7321 if (ctx->region_type == ORT_WORKSHARE
7322 || ctx->region_type == ORT_TASKGROUP
7323 || ctx->region_type == ORT_SIMD
7324 || ctx->region_type == ORT_ACC
7325 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7326 goto do_outer;
7328 flags = omp_default_clause (ctx, decl, in_code, flags);
7330 if ((flags & GOVD_PRIVATE)
7331 && lang_hooks.decls.omp_private_outer_ref (decl))
7332 flags |= GOVD_PRIVATE_OUTER_REF;
7334 omp_add_variable (ctx, decl, flags);
7336 shared = (flags & GOVD_SHARED) != 0;
7337 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7338 goto do_outer;
/* Existing entry seen for the first time in real code: also mark the
   associated size variables as seen so they get remapped.  */
7341 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7342 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7343 && DECL_SIZE (decl))
7345 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7347 splay_tree_node n2;
7348 tree t = DECL_VALUE_EXPR (decl);
7349 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7350 t = TREE_OPERAND (t, 0);
7351 gcc_assert (DECL_P (t));
7352 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7353 n2->value |= GOVD_SEEN;
7355 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7356 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7357 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7358 != INTEGER_CST))
7360 splay_tree_node n2;
7361 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7362 gcc_assert (DECL_P (t));
7363 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7364 if (n2)
7365 omp_notice_variable (ctx, t, true);
7369 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7370 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7372 /* If nothing changed, there's nothing left to do. */
7373 if ((n->value & flags) == flags)
7374 return ret;
7375 flags |= n->value;
7376 n->value = flags;
7378 do_outer:
7379 /* If the variable is private in the current context, then we don't
7380 need to propagate anything to an outer context. */
7381 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7382 return ret;
7383 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7384 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7385 return ret;
7386 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7387 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7388 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7389 return ret;
/* Otherwise recurse outward; any outer remap decision wins.  */
7390 if (ctx->outer_context
7391 && omp_notice_variable (ctx->outer_context, decl, in_code))
7392 return true;
7393 return ret;
7396 /* Verify that DECL is private within CTX. If there's specific information
7397 to the contrary in the innermost scope, generate an error. */
7399 static bool
7400 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7402 splay_tree_node n;
7404 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7405 if (n != NULL)
7407 if (n->value & GOVD_SHARED)
7409 if (ctx == gimplify_omp_ctxp)
7411 if (simd)
7412 error ("iteration variable %qE is predetermined linear",
7413 DECL_NAME (decl));
7414 else
7415 error ("iteration variable %qE should be private",
7416 DECL_NAME (decl));
7417 n->value = GOVD_PRIVATE;
7418 return true;
7420 else
7421 return false;
7423 else if ((n->value & GOVD_EXPLICIT) != 0
7424 && (ctx == gimplify_omp_ctxp
7425 || (ctx->region_type == ORT_COMBINED_PARALLEL
7426 && gimplify_omp_ctxp->outer_context == ctx)))
7428 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7429 error ("iteration variable %qE should not be firstprivate",
7430 DECL_NAME (decl));
7431 else if ((n->value & GOVD_REDUCTION) != 0)
7432 error ("iteration variable %qE should not be reduction",
7433 DECL_NAME (decl));
7434 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7435 error ("iteration variable %qE should not be linear",
7436 DECL_NAME (decl));
7438 return (ctx == gimplify_omp_ctxp
7439 || (ctx->region_type == ORT_COMBINED_PARALLEL
7440 && gimplify_omp_ctxp->outer_context == ctx));
7443 if (ctx->region_type != ORT_WORKSHARE
7444 && ctx->region_type != ORT_TASKGROUP
7445 && ctx->region_type != ORT_SIMD
7446 && ctx->region_type != ORT_ACC)
7447 return false;
7448 else if (ctx->outer_context)
7449 return omp_is_private (ctx->outer_context, decl, simd);
7450 return false;
7453 /* Return true if DECL is private within a parallel region
7454 that binds to the current construct's context or in parallel
7455 region's REDUCTION clause. */
7457 static bool
7458 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7460 splay_tree_node n;
7464 ctx = ctx->outer_context;
7465 if (ctx == NULL)
7467 if (is_global_var (decl))
7468 return false;
7470 /* References might be private, but might be shared too,
7471 when checking for copyprivate, assume they might be
7472 private, otherwise assume they might be shared. */
7473 if (copyprivate)
7474 return true;
7476 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7477 return false;
7479 /* Treat C++ privatized non-static data members outside
7480 of the privatization the same. */
7481 if (omp_member_access_dummy_var (decl))
7482 return false;
7484 return true;
7487 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7489 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7490 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7491 continue;
7493 if (n != NULL)
7495 if ((n->value & GOVD_LOCAL) != 0
7496 && omp_member_access_dummy_var (decl))
7497 return false;
7498 return (n->value & GOVD_SHARED) == 0;
7501 while (ctx->region_type == ORT_WORKSHARE
7502 || ctx->region_type == ORT_TASKGROUP
7503 || ctx->region_type == ORT_SIMD
7504 || ctx->region_type == ORT_ACC);
7505 return false;
7508 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7510 static tree
7511 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7513 tree t = *tp;
7515 /* If this node has been visited, unmark it and keep looking. */
7516 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7517 return t;
7519 if (IS_TYPE_OR_DECL_P (t))
7520 *walk_subtrees = 0;
7521 return NULL_TREE;
/* NOTE(review): this extract still carries the web blob-viewer's per-line
   number prefixes and has lost the brace-only/blank lines (visible as gaps
   in that numbering); restore the block from upstream gcc/gimplify.c before
   attempting to compile.  The comments added below annotate the logic only
   and do not alter any original line.  */
7524 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7525 lower all the depend clauses by populating corresponding depend
7526 array. Returns 0 if there are no such depend clauses, or
7527 2 if all depend clauses should be removed, 1 otherwise. */
7529 static int
7530 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
/* Dependences are grouped into four buckets, selected by I in the
   switches below: 0 = out/inout, 1 = mutexinoutset, 2 = in, 3 = depobj.
   n[] counts plain (non-iterator) addresses per bucket; counts[] holds
   the possibly non-constant count of iterator-expanded addresses.  */
7532 tree c;
7533 gimple *g;
7534 size_t n[4] = { 0, 0, 0, 0 };
7535 bool unused[4];
7536 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7537 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7538 size_t i, j;
7539 location_t first_loc = UNKNOWN_LOCATION;
/* Pass 1: classify each depend clause and accumulate the per-bucket
   address counts.  A clause with iterators is a TREE_LIST whose
   TREE_PURPOSE is a chain of TREE_VECs; the product of all iterator
   trip counts is computed once per distinct iterator chain and cached
   via LAST_ITER/LAST_COUNT.  */
7541 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7542 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7544 switch (OMP_CLAUSE_DEPEND_KIND (c))
7546 case OMP_CLAUSE_DEPEND_IN:
7547 i = 2;
7548 break;
7549 case OMP_CLAUSE_DEPEND_OUT:
7550 case OMP_CLAUSE_DEPEND_INOUT:
7551 i = 0;
7552 break;
7553 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7554 i = 1;
7555 break;
7556 case OMP_CLAUSE_DEPEND_DEPOBJ:
7557 i = 3;
7558 break;
7559 case OMP_CLAUSE_DEPEND_SOURCE:
7560 case OMP_CLAUSE_DEPEND_SINK:
7561 continue;
7562 default:
7563 gcc_unreachable ();
7565 tree t = OMP_CLAUSE_DECL (c);
7566 if (first_loc == UNKNOWN_LOCATION)
7567 first_loc = OMP_CLAUSE_LOCATION (c);
7568 if (TREE_CODE (t) == TREE_LIST
7569 && TREE_PURPOSE (t)
7570 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7572 if (TREE_PURPOSE (t) != last_iter)
7574 tree tcnt = size_one_node;
7575 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
/* Gimplify begin/end/step/orig-step (TREE_VEC slots 1-4) of each
   iterator before using them in the count computation.  */
7577 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7578 is_gimple_val, fb_rvalue) == GS_ERROR
7579 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7580 is_gimple_val, fb_rvalue) == GS_ERROR
7581 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7582 is_gimple_val, fb_rvalue) == GS_ERROR
7583 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7584 is_gimple_val, fb_rvalue)
7585 == GS_ERROR))
7586 return 2;
7587 tree var = TREE_VEC_ELT (it, 0);
7588 tree begin = TREE_VEC_ELT (it, 1);
7589 tree end = TREE_VEC_ELT (it, 2);
7590 tree step = TREE_VEC_ELT (it, 3);
7591 tree orig_step = TREE_VEC_ELT (it, 4);
7592 tree type = TREE_TYPE (var);
7593 tree stype = TREE_TYPE (step);
7594 location_t loc = DECL_SOURCE_LOCATION (var);
7595 tree endmbegin;
7596 /* Compute count for this iterator as
7597 orig_step > 0
7598 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7599 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7600 and compute product of those for the entire depend
7601 clause. */
7602 if (POINTER_TYPE_P (type))
7603 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7604 stype, end, begin);
7605 else
7606 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7607 end, begin);
7608 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7609 step,
7610 build_int_cst (stype, 1));
7611 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7612 build_int_cst (stype, 1));
7613 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7614 unshare_expr (endmbegin),
7615 stepm1);
7616 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7617 pos, step);
7618 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7619 endmbegin, stepp1);
7620 if (TYPE_UNSIGNED (stype))
/* For unsigned step types negate both operands so the
   negative-step division below stays well defined.  */
7622 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7623 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7625 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7626 neg, step);
7627 step = NULL_TREE;
7628 tree cond = fold_build2_loc (loc, LT_EXPR,
7629 boolean_type_node,
7630 begin, end);
7631 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7632 build_int_cst (stype, 0));
7633 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7634 end, begin);
7635 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7636 build_int_cst (stype, 0));
7637 tree osteptype = TREE_TYPE (orig_step);
7638 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7639 orig_step,
7640 build_int_cst (osteptype, 0));
7641 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7642 cond, pos, neg);
7643 cnt = fold_convert_loc (loc, sizetype, cnt);
7644 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7645 fb_rvalue) == GS_ERROR)
7646 return 2;
7647 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7649 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7650 fb_rvalue) == GS_ERROR)
7651 return 2;
7652 last_iter = TREE_PURPOSE (t);
7653 last_count = tcnt;
7655 if (counts[i] == NULL_TREE)
7656 counts[i] = last_count;
7657 else
7658 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7659 PLUS_EXPR, counts[i], last_count);
7661 else
7662 n[i]++;
/* If no bucket saw an iterator-based depend clause, there is nothing
   to lower.  */
7664 for (i = 0; i < 4; i++)
7665 if (counts[i])
7666 break;
7667 if (i == 4)
7668 return 0;
/* TOTAL = sum over buckets of iterator counts plus plain counts;
   TOTALPX additionally reserves the array header (1 word for the old
   layout, 4 for the new one, see IS_OLD below).  */
7670 tree total = size_zero_node;
7671 for (i = 0; i < 4; i++)
7673 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7674 if (counts[i] == NULL_TREE)
7675 counts[i] = size_zero_node;
7676 if (n[i])
7677 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7678 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7679 fb_rvalue) == GS_ERROR)
7680 return 2;
7681 total = size_binop (PLUS_EXPR, total, counts[i]);
7684 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7685 == GS_ERROR)
7686 return 2;
/* The compact "old" layout is usable when there are no mutexinoutset
   and no depobj dependences (buckets 1 and 3 unused).  */
7687 bool is_old = unused[1] && unused[3];
7688 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7689 size_int (is_old ? 1 : 4));
/* Build the (possibly variable-length) pointer array that holds the
   header followed by all dependence addresses.  */
7690 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7691 tree array = create_tmp_var_raw (type);
7692 TREE_ADDRESSABLE (array) = 1;
7693 if (TREE_CODE (totalpx) != INTEGER_CST)
7695 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7696 gimplify_type_sizes (TREE_TYPE (array), pre_p)
7697 if (gimplify_omp_ctxp)
/* Register the VLA temporary in the innermost context that actually
   establishes a data environment.  */
7699 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7700 while (ctx
7701 && (ctx->region_type == ORT_WORKSHARE
7702 || ctx->region_type == ORT_TASKGROUP
7703 || ctx->region_type == ORT_SIMD
7704 || ctx->region_type == ORT_ACC))
7705 ctx = ctx->outer_context;
7706 if (ctx)
7707 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7709 gimplify_vla_decl (array, pre_p);
7711 else
7712 gimple_add_tmp_var (array);
/* Emit the header stores: old layout puts TOTAL at element 0 and
   counts[0] at element 1; new layout stores 0 at element 0, TOTAL at
   element 1 and counts[0..2] at elements 2..4.  */
7713 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7714 NULL_TREE);
7715 tree tem;
7716 if (!is_old)
7718 tem = build2 (MODIFY_EXPR, void_type_node, r,
7719 build_int_cst (ptr_type_node, 0));
7720 gimplify_and_add (tem, pre_p);
7721 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7722 NULL_TREE);
7724 tem = build2 (MODIFY_EXPR, void_type_node, r,
7725 fold_convert (ptr_type_node, total));
7726 gimplify_and_add (tem, pre_p);
7727 for (i = 1; i < (is_old ? 2 : 4); i++)
7729 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7730 NULL_TREE, NULL_TREE);
7731 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7732 gimplify_and_add (tem, pre_p);
/* cnts[i] is the running index into ARRAY where the next address of
   bucket i goes; buckets are laid out consecutively after the header
   (2 words old layout, 5 words new layout).  */
7735 tree cnts[4];
7736 for (j = 4; j; j--)
7737 if (!unused[j - 1])
7738 break;
7739 for (i = 0; i < 4; i++)
7741 if (i && (i >= j || unused[i - 1]))
7743 cnts[i] = cnts[i - 1];
7744 continue;
7746 cnts[i] = create_tmp_var (sizetype);
7747 if (i == 0)
7748 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7749 else
7751 tree t;
7752 if (is_old)
7753 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7754 else
7755 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7756 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7757 == GS_ERROR)
7758 return 2;
7759 g = gimple_build_assign (cnts[i], t);
7761 gimple_seq_add_stmt (pre_p, g);
/* Pass 2: walk the clause list again, storing each dependence address
   into ARRAY at its bucket's running index and bumping that index.
   Iterator clauses are expanded into nested loops, reconstructed from
   the iterator TREE_VECs inside a BIND_EXPR (LAST_BIND/LAST_BODY cache
   the loop nest across consecutive clauses sharing one iterator).  */
7764 last_iter = NULL_TREE;
7765 tree last_bind = NULL_TREE;
7766 tree *last_body = NULL;
7767 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7768 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7770 switch (OMP_CLAUSE_DEPEND_KIND (c))
7772 case OMP_CLAUSE_DEPEND_IN:
7773 i = 2;
7774 break;
7775 case OMP_CLAUSE_DEPEND_OUT:
7776 case OMP_CLAUSE_DEPEND_INOUT:
7777 i = 0;
7778 break;
7779 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7780 i = 1;
7781 break;
7782 case OMP_CLAUSE_DEPEND_DEPOBJ:
7783 i = 3;
7784 break;
7785 case OMP_CLAUSE_DEPEND_SOURCE:
7786 case OMP_CLAUSE_DEPEND_SINK:
7787 continue;
7788 default:
7789 gcc_unreachable ();
7791 tree t = OMP_CLAUSE_DECL (c);
7792 if (TREE_CODE (t) == TREE_LIST
7793 && TREE_PURPOSE (t)
7794 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7796 if (TREE_PURPOSE (t) != last_iter)
7798 if (last_bind)
7799 gimplify_and_add (last_bind, pre_p)
7800 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7801 last_bind = build3 (BIND_EXPR, void_type_node,
7802 BLOCK_VARS (block), NULL, block);
7803 TREE_SIDE_EFFECTS (last_bind) = 1;
7804 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7805 tree *p = &BIND_EXPR_BODY (last_bind);
7806 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7808 tree var = TREE_VEC_ELT (it, 0);
7809 tree begin = TREE_VEC_ELT (it, 1);
7810 tree end = TREE_VEC_ELT (it, 2);
7811 tree step = TREE_VEC_ELT (it, 3);
7812 tree orig_step = TREE_VEC_ELT (it, 4);
7813 tree type = TREE_TYPE (var);
7814 location_t loc = DECL_SOURCE_LOCATION (var);
7815 /* Emit:
7816 var = begin;
7817 goto cond_label;
7818 beg_label:
7820 var = var + step;
7821 cond_label:
7822 if (orig_step > 0) {
7823 if (var < end) goto beg_label;
7824 } else {
7825 if (var > end) goto beg_label;
7827 for each iterator, with inner iterators added to
7828 the ... above. */
7829 tree beg_label = create_artificial_label (loc);
7830 tree cond_label = NULL_TREE;
7831 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7832 var, begin);
7833 append_to_statement_list_force (tem, p);
7834 tem = build_and_jump (&cond_label);
7835 append_to_statement_list_force (tem, p);
7836 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7837 append_to_statement_list (tem, p);
7838 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7839 NULL_TREE, NULL_TREE);
7840 TREE_SIDE_EFFECTS (bind) = 1;
7841 SET_EXPR_LOCATION (bind, loc);
7842 append_to_statement_list_force (bind, p);
7843 if (POINTER_TYPE_P (type))
7844 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7845 var, fold_convert_loc (loc, sizetype,
7846 step));
7847 else
7848 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7849 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7850 var, tem);
7851 append_to_statement_list_force (tem, p);
7852 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
7853 append_to_statement_list (tem, p);
7854 tree cond = fold_build2_loc (loc, LT_EXPR,
7855 boolean_type_node,
7856 var, end);
7857 tree pos
7858 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7859 cond, build_and_jump (&beg_label),
7860 void_node);
7861 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7862 var, end);
7863 tree neg
7864 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7865 cond, build_and_jump (&beg_label),
7866 void_node);
7867 tree osteptype = TREE_TYPE (orig_step);
7868 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7869 orig_step,
7870 build_int_cst (osteptype, 0));
7871 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
7872 cond, pos, neg);
7873 append_to_statement_list_force (tem, p);
7874 p = &BIND_EXPR_BODY (bind);
7876 last_body = p;
7878 last_iter = TREE_PURPOSE (t);
7879 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7881 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
7882 0), last_body);
7883 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7885 if (error_operand_p (TREE_VALUE (t)))
7886 return 2;
7887 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
7888 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7889 NULL_TREE, NULL_TREE);
7890 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7891 void_type_node, r, TREE_VALUE (t));
7892 append_to_statement_list_force (tem, last_body);
7893 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7894 void_type_node, cnts[i],
7895 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
7896 append_to_statement_list_force (tem, last_body);
7897 TREE_VALUE (t) = null_pointer_node;
7899 else
7901 if (last_bind)
7903 gimplify_and_add (last_bind, pre_p);
7904 last_bind = NULL_TREE;
7906 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7908 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7909 NULL, is_gimple_val, fb_rvalue);
7910 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7912 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7913 return 2;
7914 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7915 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7916 is_gimple_val, fb_rvalue) == GS_ERROR)
7917 return 2;
7918 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7919 NULL_TREE, NULL_TREE);
7920 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
7921 gimplify_and_add (tem, pre_p);
7922 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
7923 size_int (1)));
7924 gimple_seq_add_stmt (pre_p, g);
7927 if (last_bind)
7928 gimplify_and_add (last_bind, pre_p);
/* Emit a runtime consistency check: each running index must have ended
   exactly at its bucket boundary, otherwise call __builtin_trap.  */
7929 tree cond = boolean_false_node;
7930 if (is_old)
7932 if (!unused[0])
7933 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
7934 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
7935 size_int (2)));
7936 if (!unused[2])
7937 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
7938 build2_loc (first_loc, NE_EXPR, boolean_type_node,
7939 cnts[2],
7940 size_binop_loc (first_loc, PLUS_EXPR,
7941 totalpx,
7942 size_int (1))));
7944 else
7946 tree prev = size_int (5);
7947 for (i = 0; i < 4; i++)
7949 if (unused[i])
7950 continue;
7951 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
7952 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
7953 build2_loc (first_loc, NE_EXPR, boolean_type_node,
7954 cnts[i], unshare_expr (prev)));
7957 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
7958 build_call_expr_loc (first_loc,
7959 builtin_decl_explicit (BUILT_IN_TRAP),
7960 0), void_node);
7961 gimplify_and_add (tem, pre_p);
/* Record the lowered form: prepend a depend clause with kind
   OMP_CLAUSE_DEPEND_LAST whose decl is the address of ARRAY.  */
7962 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7963 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
7964 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7965 OMP_CLAUSE_CHAIN (c) = *list_p;
7966 *list_p = c;
7967 return 1;
7970 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7971 and previous omp contexts. */
7973 static void
7974 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7975 enum omp_region_type region_type,
7976 enum tree_code code)
7978 struct gimplify_omp_ctx *ctx, *outer_ctx;
7979 tree c;
7980 hash_map<tree, tree> *struct_map_to_clause = NULL;
7981 tree *prev_list_p = NULL;
7982 int handled_depend_iterators = -1;
7983 int nowait = -1;
7985 ctx = new_omp_context (region_type);
7986 outer_ctx = ctx->outer_context;
7987 if (code == OMP_TARGET)
7989 if (!lang_GNU_Fortran ())
7990 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7991 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
7993 if (!lang_GNU_Fortran ())
7994 switch (code)
7996 case OMP_TARGET:
7997 case OMP_TARGET_DATA:
7998 case OMP_TARGET_ENTER_DATA:
7999 case OMP_TARGET_EXIT_DATA:
8000 case OACC_DECLARE:
8001 case OACC_HOST_DATA:
8002 case OACC_PARALLEL:
8003 case OACC_KERNELS:
8004 ctx->target_firstprivatize_array_bases = true;
8005 default:
8006 break;
8009 while ((c = *list_p) != NULL)
8011 bool remove = false;
8012 bool notice_outer = true;
8013 const char *check_non_private = NULL;
8014 unsigned int flags;
8015 tree decl;
8017 switch (OMP_CLAUSE_CODE (c))
8019 case OMP_CLAUSE_PRIVATE:
8020 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8021 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8023 flags |= GOVD_PRIVATE_OUTER_REF;
8024 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8026 else
8027 notice_outer = false;
8028 goto do_add;
8029 case OMP_CLAUSE_SHARED:
8030 flags = GOVD_SHARED | GOVD_EXPLICIT;
8031 goto do_add;
8032 case OMP_CLAUSE_FIRSTPRIVATE:
8033 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8034 check_non_private = "firstprivate";
8035 goto do_add;
8036 case OMP_CLAUSE_LASTPRIVATE:
8037 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8038 switch (code)
8040 case OMP_DISTRIBUTE:
8041 error_at (OMP_CLAUSE_LOCATION (c),
8042 "conditional %<lastprivate%> clause on "
8043 "%<distribute%> construct");
8044 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8045 break;
8046 case OMP_TASKLOOP:
8047 error_at (OMP_CLAUSE_LOCATION (c),
8048 "conditional %<lastprivate%> clause on "
8049 "%<taskloop%> construct");
8050 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8051 break;
8052 default:
8053 break;
8055 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8056 check_non_private = "lastprivate";
8057 decl = OMP_CLAUSE_DECL (c);
8058 if (error_operand_p (decl))
8059 goto do_add;
8060 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8061 && !lang_hooks.decls.omp_scalar_p (decl))
8063 error_at (OMP_CLAUSE_LOCATION (c),
8064 "non-scalar variable %qD in conditional "
8065 "%<lastprivate%> clause", decl);
8066 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8068 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8069 sorry_at (OMP_CLAUSE_LOCATION (c),
8070 "%<conditional%> modifier on %<lastprivate%> clause "
8071 "not supported yet");
8072 if (outer_ctx
8073 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8074 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8075 == ORT_COMBINED_TEAMS))
8076 && splay_tree_lookup (outer_ctx->variables,
8077 (splay_tree_key) decl) == NULL)
8079 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8080 if (outer_ctx->outer_context)
8081 omp_notice_variable (outer_ctx->outer_context, decl, true);
8083 else if (outer_ctx
8084 && (outer_ctx->region_type & ORT_TASK) != 0
8085 && outer_ctx->combined_loop
8086 && splay_tree_lookup (outer_ctx->variables,
8087 (splay_tree_key) decl) == NULL)
8089 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8090 if (outer_ctx->outer_context)
8091 omp_notice_variable (outer_ctx->outer_context, decl, true);
8093 else if (outer_ctx
8094 && (outer_ctx->region_type == ORT_WORKSHARE
8095 || outer_ctx->region_type == ORT_ACC)
8096 && outer_ctx->combined_loop
8097 && splay_tree_lookup (outer_ctx->variables,
8098 (splay_tree_key) decl) == NULL
8099 && !omp_check_private (outer_ctx, decl, false))
8101 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8102 if (outer_ctx->outer_context
8103 && (outer_ctx->outer_context->region_type
8104 == ORT_COMBINED_PARALLEL)
8105 && splay_tree_lookup (outer_ctx->outer_context->variables,
8106 (splay_tree_key) decl) == NULL)
8108 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8109 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8110 if (octx->outer_context)
8112 octx = octx->outer_context;
8113 if (octx->region_type == ORT_WORKSHARE
8114 && octx->combined_loop
8115 && splay_tree_lookup (octx->variables,
8116 (splay_tree_key) decl) == NULL
8117 && !omp_check_private (octx, decl, false))
8119 omp_add_variable (octx, decl,
8120 GOVD_LASTPRIVATE | GOVD_SEEN);
8121 octx = octx->outer_context;
8122 if (octx
8123 && ((octx->region_type & ORT_COMBINED_TEAMS)
8124 == ORT_COMBINED_TEAMS)
8125 && (splay_tree_lookup (octx->variables,
8126 (splay_tree_key) decl)
8127 == NULL))
8129 omp_add_variable (octx, decl,
8130 GOVD_SHARED | GOVD_SEEN);
8131 octx = octx->outer_context;
8134 if (octx)
8135 omp_notice_variable (octx, decl, true);
8138 else if (outer_ctx->outer_context)
8139 omp_notice_variable (outer_ctx->outer_context, decl, true);
8141 goto do_add;
8142 case OMP_CLAUSE_REDUCTION:
8143 if (OMP_CLAUSE_REDUCTION_TASK (c))
8145 if (region_type == ORT_WORKSHARE)
8147 if (nowait == -1)
8148 nowait = omp_find_clause (*list_p,
8149 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8150 if (nowait
8151 && (outer_ctx == NULL
8152 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8154 error_at (OMP_CLAUSE_LOCATION (c),
8155 "%<task%> reduction modifier on a construct "
8156 "with a %<nowait%> clause");
8157 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8160 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8162 error_at (OMP_CLAUSE_LOCATION (c),
8163 "invalid %<task%> reduction modifier on construct "
8164 "other than %<parallel%>, %<for%> or %<sections%>");
8165 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8168 /* FALLTHRU */
8169 case OMP_CLAUSE_IN_REDUCTION:
8170 case OMP_CLAUSE_TASK_REDUCTION:
8171 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8172 /* OpenACC permits reductions on private variables. */
8173 if (!(region_type & ORT_ACC)
8174 /* taskgroup is actually not a worksharing region. */
8175 && code != OMP_TASKGROUP)
8176 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8177 decl = OMP_CLAUSE_DECL (c);
8178 if (TREE_CODE (decl) == MEM_REF)
8180 tree type = TREE_TYPE (decl);
8181 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8182 NULL, is_gimple_val, fb_rvalue, false)
8183 == GS_ERROR)
8185 remove = true;
8186 break;
8188 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8189 if (DECL_P (v))
8191 omp_firstprivatize_variable (ctx, v);
8192 omp_notice_variable (ctx, v, true);
8194 decl = TREE_OPERAND (decl, 0);
8195 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8197 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8198 NULL, is_gimple_val, fb_rvalue, false)
8199 == GS_ERROR)
8201 remove = true;
8202 break;
8204 v = TREE_OPERAND (decl, 1);
8205 if (DECL_P (v))
8207 omp_firstprivatize_variable (ctx, v);
8208 omp_notice_variable (ctx, v, true);
8210 decl = TREE_OPERAND (decl, 0);
8212 if (TREE_CODE (decl) == ADDR_EXPR
8213 || TREE_CODE (decl) == INDIRECT_REF)
8214 decl = TREE_OPERAND (decl, 0);
8216 goto do_add_decl;
8217 case OMP_CLAUSE_LINEAR:
8218 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8219 is_gimple_val, fb_rvalue) == GS_ERROR)
8221 remove = true;
8222 break;
8224 else
8226 if (code == OMP_SIMD
8227 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8229 struct gimplify_omp_ctx *octx = outer_ctx;
8230 if (octx
8231 && octx->region_type == ORT_WORKSHARE
8232 && octx->combined_loop
8233 && !octx->distribute)
8235 if (octx->outer_context
8236 && (octx->outer_context->region_type
8237 == ORT_COMBINED_PARALLEL))
8238 octx = octx->outer_context->outer_context;
8239 else
8240 octx = octx->outer_context;
8242 if (octx
8243 && octx->region_type == ORT_WORKSHARE
8244 && octx->combined_loop
8245 && octx->distribute)
8247 error_at (OMP_CLAUSE_LOCATION (c),
8248 "%<linear%> clause for variable other than "
8249 "loop iterator specified on construct "
8250 "combined with %<distribute%>");
8251 remove = true;
8252 break;
8255 /* For combined #pragma omp parallel for simd, need to put
8256 lastprivate and perhaps firstprivate too on the
8257 parallel. Similarly for #pragma omp for simd. */
8258 struct gimplify_omp_ctx *octx = outer_ctx;
8259 decl = NULL_TREE;
8262 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8263 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8264 break;
8265 decl = OMP_CLAUSE_DECL (c);
8266 if (error_operand_p (decl))
8268 decl = NULL_TREE;
8269 break;
8271 flags = GOVD_SEEN;
8272 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8273 flags |= GOVD_FIRSTPRIVATE;
8274 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8275 flags |= GOVD_LASTPRIVATE;
8276 if (octx
8277 && octx->region_type == ORT_WORKSHARE
8278 && octx->combined_loop)
8280 if (octx->outer_context
8281 && (octx->outer_context->region_type
8282 == ORT_COMBINED_PARALLEL))
8283 octx = octx->outer_context;
8284 else if (omp_check_private (octx, decl, false))
8285 break;
8287 else if (octx
8288 && (octx->region_type & ORT_TASK) != 0
8289 && octx->combined_loop)
8291 else if (octx
8292 && octx->region_type == ORT_COMBINED_PARALLEL
8293 && ctx->region_type == ORT_WORKSHARE
8294 && octx == outer_ctx)
8295 flags = GOVD_SEEN | GOVD_SHARED;
8296 else if (octx
8297 && ((octx->region_type & ORT_COMBINED_TEAMS)
8298 == ORT_COMBINED_TEAMS))
8299 flags = GOVD_SEEN | GOVD_SHARED;
8300 else if (octx
8301 && octx->region_type == ORT_COMBINED_TARGET)
8303 flags &= ~GOVD_LASTPRIVATE;
8304 if (flags == GOVD_SEEN)
8305 break;
8307 else
8308 break;
8309 splay_tree_node on
8310 = splay_tree_lookup (octx->variables,
8311 (splay_tree_key) decl);
8312 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8314 octx = NULL;
8315 break;
8317 omp_add_variable (octx, decl, flags);
8318 if (octx->outer_context == NULL)
8319 break;
8320 octx = octx->outer_context;
8322 while (1);
8323 if (octx
8324 && decl
8325 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8326 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8327 omp_notice_variable (octx, decl, true);
8329 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8330 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8331 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8333 notice_outer = false;
8334 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8336 goto do_add;
8338 case OMP_CLAUSE_MAP:
8339 decl = OMP_CLAUSE_DECL (c);
8340 if (error_operand_p (decl))
8341 remove = true;
8342 switch (code)
8344 case OMP_TARGET:
8345 break;
8346 case OACC_DATA:
8347 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8348 break;
8349 /* FALLTHRU */
8350 case OMP_TARGET_DATA:
8351 case OMP_TARGET_ENTER_DATA:
8352 case OMP_TARGET_EXIT_DATA:
8353 case OACC_ENTER_DATA:
8354 case OACC_EXIT_DATA:
8355 case OACC_HOST_DATA:
8356 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8357 || (OMP_CLAUSE_MAP_KIND (c)
8358 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8359 /* For target {,enter ,exit }data only the array slice is
8360 mapped, but not the pointer to it. */
8361 remove = true;
8362 break;
8363 default:
8364 break;
8366 if (remove)
8367 break;
8368 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8370 struct gimplify_omp_ctx *octx;
8371 for (octx = outer_ctx; octx; octx = octx->outer_context)
8373 if (octx->region_type != ORT_ACC_HOST_DATA)
8374 break;
8375 splay_tree_node n2
8376 = splay_tree_lookup (octx->variables,
8377 (splay_tree_key) decl);
8378 if (n2)
8379 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8380 "declared in enclosing %<host_data%> region",
8381 DECL_NAME (decl));
8384 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8385 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8386 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8387 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8388 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8390 remove = true;
8391 break;
8393 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8394 || (OMP_CLAUSE_MAP_KIND (c)
8395 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8396 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8398 OMP_CLAUSE_SIZE (c)
8399 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8400 false);
8401 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8402 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8404 if (!DECL_P (decl))
8406 tree d = decl, *pd;
8407 if (TREE_CODE (d) == ARRAY_REF)
8409 while (TREE_CODE (d) == ARRAY_REF)
8410 d = TREE_OPERAND (d, 0);
8411 if (TREE_CODE (d) == COMPONENT_REF
8412 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8413 decl = d;
8415 pd = &OMP_CLAUSE_DECL (c);
8416 if (d == decl
8417 && TREE_CODE (decl) == INDIRECT_REF
8418 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8419 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8420 == REFERENCE_TYPE))
8422 pd = &TREE_OPERAND (decl, 0);
8423 decl = TREE_OPERAND (decl, 0);
8425 if (TREE_CODE (decl) == COMPONENT_REF)
8427 while (TREE_CODE (decl) == COMPONENT_REF)
8428 decl = TREE_OPERAND (decl, 0);
8429 if (TREE_CODE (decl) == INDIRECT_REF
8430 && DECL_P (TREE_OPERAND (decl, 0))
8431 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8432 == REFERENCE_TYPE))
8433 decl = TREE_OPERAND (decl, 0);
8435 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8436 == GS_ERROR)
8438 remove = true;
8439 break;
8441 if (DECL_P (decl))
8443 if (error_operand_p (decl))
8445 remove = true;
8446 break;
8449 tree stype = TREE_TYPE (decl);
8450 if (TREE_CODE (stype) == REFERENCE_TYPE)
8451 stype = TREE_TYPE (stype);
8452 if (TYPE_SIZE_UNIT (stype) == NULL
8453 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8455 error_at (OMP_CLAUSE_LOCATION (c),
8456 "mapping field %qE of variable length "
8457 "structure", OMP_CLAUSE_DECL (c));
8458 remove = true;
8459 break;
8462 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8464 /* Error recovery. */
8465 if (prev_list_p == NULL)
8467 remove = true;
8468 break;
8470 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8472 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8473 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8475 remove = true;
8476 break;
8481 tree offset;
8482 poly_int64 bitsize, bitpos;
8483 machine_mode mode;
8484 int unsignedp, reversep, volatilep = 0;
8485 tree base = OMP_CLAUSE_DECL (c);
8486 while (TREE_CODE (base) == ARRAY_REF)
8487 base = TREE_OPERAND (base, 0);
8488 if (TREE_CODE (base) == INDIRECT_REF)
8489 base = TREE_OPERAND (base, 0);
8490 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8491 &mode, &unsignedp, &reversep,
8492 &volatilep);
8493 tree orig_base = base;
8494 if ((TREE_CODE (base) == INDIRECT_REF
8495 || (TREE_CODE (base) == MEM_REF
8496 && integer_zerop (TREE_OPERAND (base, 1))))
8497 && DECL_P (TREE_OPERAND (base, 0))
8498 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8499 == REFERENCE_TYPE))
8500 base = TREE_OPERAND (base, 0);
8501 gcc_assert (base == decl
8502 && (offset == NULL_TREE
8503 || poly_int_tree_p (offset)));
8505 splay_tree_node n
8506 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8507 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8508 == GOMP_MAP_ALWAYS_POINTER);
8509 if (n == NULL || (n->value & GOVD_MAP) == 0)
8511 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8512 OMP_CLAUSE_MAP);
8513 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8514 if (orig_base != base)
8515 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8516 else
8517 OMP_CLAUSE_DECL (l) = decl;
8518 OMP_CLAUSE_SIZE (l) = size_int (1);
8519 if (struct_map_to_clause == NULL)
8520 struct_map_to_clause = new hash_map<tree, tree>;
8521 struct_map_to_clause->put (decl, l);
8522 if (ptr)
8524 enum gomp_map_kind mkind
8525 = code == OMP_TARGET_EXIT_DATA
8526 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8527 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8528 OMP_CLAUSE_MAP);
8529 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8530 OMP_CLAUSE_DECL (c2)
8531 = unshare_expr (OMP_CLAUSE_DECL (c));
8532 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8533 OMP_CLAUSE_SIZE (c2)
8534 = TYPE_SIZE_UNIT (ptr_type_node);
8535 OMP_CLAUSE_CHAIN (l) = c2;
8536 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8538 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8539 tree c3
8540 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8541 OMP_CLAUSE_MAP);
8542 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8543 OMP_CLAUSE_DECL (c3)
8544 = unshare_expr (OMP_CLAUSE_DECL (c4));
8545 OMP_CLAUSE_SIZE (c3)
8546 = TYPE_SIZE_UNIT (ptr_type_node);
8547 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8548 OMP_CLAUSE_CHAIN (c2) = c3;
8550 *prev_list_p = l;
8551 prev_list_p = NULL;
8553 else
8555 OMP_CLAUSE_CHAIN (l) = c;
8556 *list_p = l;
8557 list_p = &OMP_CLAUSE_CHAIN (l);
8559 if (orig_base != base && code == OMP_TARGET)
8561 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8562 OMP_CLAUSE_MAP);
8563 enum gomp_map_kind mkind
8564 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8565 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8566 OMP_CLAUSE_DECL (c2) = decl;
8567 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8568 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8569 OMP_CLAUSE_CHAIN (l) = c2;
8571 flags = GOVD_MAP | GOVD_EXPLICIT;
8572 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8573 flags |= GOVD_SEEN;
8574 goto do_add_decl;
8576 else
8578 tree *osc = struct_map_to_clause->get (decl);
8579 tree *sc = NULL, *scp = NULL;
8580 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8581 n->value |= GOVD_SEEN;
8582 poly_offset_int o1, o2;
8583 if (offset)
8584 o1 = wi::to_poly_offset (offset);
8585 else
8586 o1 = 0;
8587 if (maybe_ne (bitpos, 0))
8588 o1 += bits_to_bytes_round_down (bitpos);
8589 sc = &OMP_CLAUSE_CHAIN (*osc);
8590 if (*sc != c
8591 && (OMP_CLAUSE_MAP_KIND (*sc)
8592 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8593 sc = &OMP_CLAUSE_CHAIN (*sc);
8594 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8595 if (ptr && sc == prev_list_p)
8596 break;
8597 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8598 != COMPONENT_REF
8599 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8600 != INDIRECT_REF)
8601 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8602 != ARRAY_REF))
8603 break;
8604 else
8606 tree offset2;
8607 poly_int64 bitsize2, bitpos2;
8608 base = OMP_CLAUSE_DECL (*sc);
8609 if (TREE_CODE (base) == ARRAY_REF)
8611 while (TREE_CODE (base) == ARRAY_REF)
8612 base = TREE_OPERAND (base, 0);
8613 if (TREE_CODE (base) != COMPONENT_REF
8614 || (TREE_CODE (TREE_TYPE (base))
8615 != ARRAY_TYPE))
8616 break;
8618 else if (TREE_CODE (base) == INDIRECT_REF
8619 && (TREE_CODE (TREE_OPERAND (base, 0))
8620 == COMPONENT_REF)
8621 && (TREE_CODE (TREE_TYPE
8622 (TREE_OPERAND (base, 0)))
8623 == REFERENCE_TYPE))
8624 base = TREE_OPERAND (base, 0);
8625 base = get_inner_reference (base, &bitsize2,
8626 &bitpos2, &offset2,
8627 &mode, &unsignedp,
8628 &reversep, &volatilep);
8629 if ((TREE_CODE (base) == INDIRECT_REF
8630 || (TREE_CODE (base) == MEM_REF
8631 && integer_zerop (TREE_OPERAND (base,
8632 1))))
8633 && DECL_P (TREE_OPERAND (base, 0))
8634 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8635 0)))
8636 == REFERENCE_TYPE))
8637 base = TREE_OPERAND (base, 0);
8638 if (base != decl)
8639 break;
8640 if (scp)
8641 continue;
8642 gcc_assert (offset == NULL_TREE
8643 || poly_int_tree_p (offset));
8644 tree d1 = OMP_CLAUSE_DECL (*sc);
8645 tree d2 = OMP_CLAUSE_DECL (c);
8646 while (TREE_CODE (d1) == ARRAY_REF)
8647 d1 = TREE_OPERAND (d1, 0);
8648 while (TREE_CODE (d2) == ARRAY_REF)
8649 d2 = TREE_OPERAND (d2, 0);
8650 if (TREE_CODE (d1) == INDIRECT_REF)
8651 d1 = TREE_OPERAND (d1, 0);
8652 if (TREE_CODE (d2) == INDIRECT_REF)
8653 d2 = TREE_OPERAND (d2, 0);
8654 while (TREE_CODE (d1) == COMPONENT_REF)
8655 if (TREE_CODE (d2) == COMPONENT_REF
8656 && TREE_OPERAND (d1, 1)
8657 == TREE_OPERAND (d2, 1))
8659 d1 = TREE_OPERAND (d1, 0);
8660 d2 = TREE_OPERAND (d2, 0);
8662 else
8663 break;
8664 if (d1 == d2)
8666 error_at (OMP_CLAUSE_LOCATION (c),
8667 "%qE appears more than once in map "
8668 "clauses", OMP_CLAUSE_DECL (c));
8669 remove = true;
8670 break;
8672 if (offset2)
8673 o2 = wi::to_poly_offset (offset2);
8674 else
8675 o2 = 0;
8676 o2 += bits_to_bytes_round_down (bitpos2);
8677 if (maybe_lt (o1, o2)
8678 || (known_eq (o1, 2)
8679 && maybe_lt (bitpos, bitpos2)))
8681 if (ptr)
8682 scp = sc;
8683 else
8684 break;
8687 if (remove)
8688 break;
8689 OMP_CLAUSE_SIZE (*osc)
8690 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8691 size_one_node);
8692 if (ptr)
8694 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8695 OMP_CLAUSE_MAP);
8696 tree cl = NULL_TREE;
8697 enum gomp_map_kind mkind
8698 = code == OMP_TARGET_EXIT_DATA
8699 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8700 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8701 OMP_CLAUSE_DECL (c2)
8702 = unshare_expr (OMP_CLAUSE_DECL (c));
8703 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8704 OMP_CLAUSE_SIZE (c2)
8705 = TYPE_SIZE_UNIT (ptr_type_node);
8706 cl = scp ? *prev_list_p : c2;
8707 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8709 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8710 tree c3
8711 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8712 OMP_CLAUSE_MAP);
8713 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8714 OMP_CLAUSE_DECL (c3)
8715 = unshare_expr (OMP_CLAUSE_DECL (c4));
8716 OMP_CLAUSE_SIZE (c3)
8717 = TYPE_SIZE_UNIT (ptr_type_node);
8718 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8719 if (!scp)
8720 OMP_CLAUSE_CHAIN (c2) = c3;
8721 else
8722 cl = c3;
8724 if (scp)
8725 *scp = c2;
8726 if (sc == prev_list_p)
8728 *sc = cl;
8729 prev_list_p = NULL;
8731 else
8733 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8734 list_p = prev_list_p;
8735 prev_list_p = NULL;
8736 OMP_CLAUSE_CHAIN (c) = *sc;
8737 *sc = cl;
8738 continue;
8741 else if (*sc != c)
8743 *list_p = OMP_CLAUSE_CHAIN (c);
8744 OMP_CLAUSE_CHAIN (c) = *sc;
8745 *sc = c;
8746 continue;
8750 if (!remove
8751 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8752 && OMP_CLAUSE_CHAIN (c)
8753 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8754 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8755 == GOMP_MAP_ALWAYS_POINTER))
8756 prev_list_p = list_p;
8757 break;
8759 flags = GOVD_MAP | GOVD_EXPLICIT;
8760 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8761 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8762 flags |= GOVD_MAP_ALWAYS_TO;
8763 goto do_add;
8765 case OMP_CLAUSE_DEPEND:
8766 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8768 tree deps = OMP_CLAUSE_DECL (c);
8769 while (deps && TREE_CODE (deps) == TREE_LIST)
8771 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8772 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8773 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8774 pre_p, NULL, is_gimple_val, fb_rvalue);
8775 deps = TREE_CHAIN (deps);
8777 break;
8779 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8780 break;
8781 if (handled_depend_iterators == -1)
8782 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8783 if (handled_depend_iterators)
8785 if (handled_depend_iterators == 2)
8786 remove = true;
8787 break;
8789 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8791 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8792 NULL, is_gimple_val, fb_rvalue);
8793 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8795 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8797 remove = true;
8798 break;
8800 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8801 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8802 is_gimple_val, fb_rvalue) == GS_ERROR)
8804 remove = true;
8805 break;
8807 break;
8809 case OMP_CLAUSE_TO:
8810 case OMP_CLAUSE_FROM:
8811 case OMP_CLAUSE__CACHE_:
8812 decl = OMP_CLAUSE_DECL (c);
8813 if (error_operand_p (decl))
8815 remove = true;
8816 break;
8818 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8819 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8820 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8821 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8822 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8824 remove = true;
8825 break;
8827 if (!DECL_P (decl))
8829 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8830 NULL, is_gimple_lvalue, fb_lvalue)
8831 == GS_ERROR)
8833 remove = true;
8834 break;
8836 break;
8838 goto do_notice;
8840 case OMP_CLAUSE_USE_DEVICE_PTR:
8841 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8842 goto do_add;
8843 case OMP_CLAUSE_IS_DEVICE_PTR:
8844 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8845 goto do_add;
8847 do_add:
8848 decl = OMP_CLAUSE_DECL (c);
8849 do_add_decl:
8850 if (error_operand_p (decl))
8852 remove = true;
8853 break;
8855 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8857 tree t = omp_member_access_dummy_var (decl);
8858 if (t)
8860 tree v = DECL_VALUE_EXPR (decl);
8861 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8862 if (outer_ctx)
8863 omp_notice_variable (outer_ctx, t, true);
8866 if (code == OACC_DATA
8867 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8868 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8869 flags |= GOVD_MAP_0LEN_ARRAY;
8870 omp_add_variable (ctx, decl, flags);
8871 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8872 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
8873 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8874 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8876 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8877 GOVD_LOCAL | GOVD_SEEN);
8878 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8879 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8880 find_decl_expr,
8881 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8882 NULL) == NULL_TREE)
8883 omp_add_variable (ctx,
8884 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8885 GOVD_LOCAL | GOVD_SEEN);
8886 gimplify_omp_ctxp = ctx;
8887 push_gimplify_context ();
8889 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8890 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8892 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8893 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8894 pop_gimplify_context
8895 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8896 push_gimplify_context ();
8897 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8898 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8899 pop_gimplify_context
8900 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8901 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8902 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8904 gimplify_omp_ctxp = outer_ctx;
8906 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8907 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8909 gimplify_omp_ctxp = ctx;
8910 push_gimplify_context ();
8911 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8913 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8914 NULL, NULL);
8915 TREE_SIDE_EFFECTS (bind) = 1;
8916 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8917 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8919 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8920 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8921 pop_gimplify_context
8922 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8923 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8925 gimplify_omp_ctxp = outer_ctx;
8927 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8928 && OMP_CLAUSE_LINEAR_STMT (c))
8930 gimplify_omp_ctxp = ctx;
8931 push_gimplify_context ();
8932 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8934 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8935 NULL, NULL);
8936 TREE_SIDE_EFFECTS (bind) = 1;
8937 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8938 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8940 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8941 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8942 pop_gimplify_context
8943 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8944 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8946 gimplify_omp_ctxp = outer_ctx;
8948 if (notice_outer)
8949 goto do_notice;
8950 break;
8952 case OMP_CLAUSE_COPYIN:
8953 case OMP_CLAUSE_COPYPRIVATE:
8954 decl = OMP_CLAUSE_DECL (c);
8955 if (error_operand_p (decl))
8957 remove = true;
8958 break;
8960 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8961 && !remove
8962 && !omp_check_private (ctx, decl, true))
8964 remove = true;
8965 if (is_global_var (decl))
8967 if (DECL_THREAD_LOCAL_P (decl))
8968 remove = false;
8969 else if (DECL_HAS_VALUE_EXPR_P (decl))
8971 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8973 if (value
8974 && DECL_P (value)
8975 && DECL_THREAD_LOCAL_P (value))
8976 remove = false;
8979 if (remove)
8980 error_at (OMP_CLAUSE_LOCATION (c),
8981 "copyprivate variable %qE is not threadprivate"
8982 " or private in outer context", DECL_NAME (decl));
8984 do_notice:
8985 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
8986 && outer_ctx
8987 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
8988 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8989 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
8990 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE))
8992 splay_tree_node on
8993 = splay_tree_lookup (outer_ctx->variables,
8994 (splay_tree_key)decl);
8995 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
8997 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8998 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8999 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9000 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9001 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9002 == POINTER_TYPE))))
9003 omp_firstprivatize_variable (outer_ctx, decl);
9004 else
9005 omp_add_variable (outer_ctx, decl,
9006 GOVD_SEEN | GOVD_SHARED);
9007 omp_notice_variable (outer_ctx, decl, true);
9010 if (outer_ctx)
9011 omp_notice_variable (outer_ctx, decl, true);
9012 if (check_non_private
9013 && region_type == ORT_WORKSHARE
9014 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9015 || decl == OMP_CLAUSE_DECL (c)
9016 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9017 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9018 == ADDR_EXPR
9019 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9020 == POINTER_PLUS_EXPR
9021 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9022 (OMP_CLAUSE_DECL (c), 0), 0))
9023 == ADDR_EXPR)))))
9024 && omp_check_private (ctx, decl, false))
9026 error ("%s variable %qE is private in outer context",
9027 check_non_private, DECL_NAME (decl));
9028 remove = true;
9030 break;
9032 case OMP_CLAUSE_IF:
9033 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9034 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9036 const char *p[2];
9037 for (int i = 0; i < 2; i++)
9038 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9040 case VOID_CST: p[i] = "cancel"; break;
9041 case OMP_PARALLEL: p[i] = "parallel"; break;
9042 case OMP_SIMD: p[i] = "simd"; break;
9043 case OMP_TASK: p[i] = "task"; break;
9044 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9045 case OMP_TARGET_DATA: p[i] = "target data"; break;
9046 case OMP_TARGET: p[i] = "target"; break;
9047 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9048 case OMP_TARGET_ENTER_DATA:
9049 p[i] = "target enter data"; break;
9050 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9051 default: gcc_unreachable ();
9053 error_at (OMP_CLAUSE_LOCATION (c),
9054 "expected %qs %<if%> clause modifier rather than %qs",
9055 p[0], p[1]);
9056 remove = true;
9058 /* Fall through. */
9060 case OMP_CLAUSE_FINAL:
9061 OMP_CLAUSE_OPERAND (c, 0)
9062 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9063 /* Fall through. */
9065 case OMP_CLAUSE_SCHEDULE:
9066 case OMP_CLAUSE_NUM_THREADS:
9067 case OMP_CLAUSE_NUM_TEAMS:
9068 case OMP_CLAUSE_THREAD_LIMIT:
9069 case OMP_CLAUSE_DIST_SCHEDULE:
9070 case OMP_CLAUSE_DEVICE:
9071 case OMP_CLAUSE_PRIORITY:
9072 case OMP_CLAUSE_GRAINSIZE:
9073 case OMP_CLAUSE_NUM_TASKS:
9074 case OMP_CLAUSE_HINT:
9075 case OMP_CLAUSE_ASYNC:
9076 case OMP_CLAUSE_WAIT:
9077 case OMP_CLAUSE_NUM_GANGS:
9078 case OMP_CLAUSE_NUM_WORKERS:
9079 case OMP_CLAUSE_VECTOR_LENGTH:
9080 case OMP_CLAUSE_WORKER:
9081 case OMP_CLAUSE_VECTOR:
9082 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9083 is_gimple_val, fb_rvalue) == GS_ERROR)
9084 remove = true;
9085 break;
9087 case OMP_CLAUSE_GANG:
9088 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9089 is_gimple_val, fb_rvalue) == GS_ERROR)
9090 remove = true;
9091 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9092 is_gimple_val, fb_rvalue) == GS_ERROR)
9093 remove = true;
9094 break;
9096 case OMP_CLAUSE_NOWAIT:
9097 nowait = 1;
9098 break;
9100 case OMP_CLAUSE_ORDERED:
9101 case OMP_CLAUSE_UNTIED:
9102 case OMP_CLAUSE_COLLAPSE:
9103 case OMP_CLAUSE_TILE:
9104 case OMP_CLAUSE_AUTO:
9105 case OMP_CLAUSE_SEQ:
9106 case OMP_CLAUSE_INDEPENDENT:
9107 case OMP_CLAUSE_MERGEABLE:
9108 case OMP_CLAUSE_PROC_BIND:
9109 case OMP_CLAUSE_SAFELEN:
9110 case OMP_CLAUSE_SIMDLEN:
9111 case OMP_CLAUSE_NOGROUP:
9112 case OMP_CLAUSE_THREADS:
9113 case OMP_CLAUSE_SIMD:
9114 case OMP_CLAUSE_IF_PRESENT:
9115 case OMP_CLAUSE_FINALIZE:
9116 break;
9118 case OMP_CLAUSE_DEFAULTMAP:
9119 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9120 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9122 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9123 gdmkmin = GDMK_SCALAR;
9124 gdmkmax = GDMK_POINTER;
9125 break;
9126 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9127 gdmkmin = gdmkmax = GDMK_SCALAR;
9128 break;
9129 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9130 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9131 break;
9132 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9133 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9134 break;
9135 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9136 gdmkmin = gdmkmax = GDMK_POINTER;
9137 break;
9138 default:
9139 gcc_unreachable ();
9141 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9142 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9144 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9145 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9146 break;
9147 case OMP_CLAUSE_DEFAULTMAP_TO:
9148 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9149 break;
9150 case OMP_CLAUSE_DEFAULTMAP_FROM:
9151 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9152 break;
9153 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9154 ctx->defaultmap[gdmk] = GOVD_MAP;
9155 break;
9156 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9157 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9158 break;
9159 case OMP_CLAUSE_DEFAULTMAP_NONE:
9160 ctx->defaultmap[gdmk] = 0;
9161 break;
9162 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9163 switch (gdmk)
9165 case GDMK_SCALAR:
9166 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9167 break;
9168 case GDMK_AGGREGATE:
9169 case GDMK_ALLOCATABLE:
9170 ctx->defaultmap[gdmk] = GOVD_MAP;
9171 break;
9172 case GDMK_POINTER:
9173 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9174 break;
9175 default:
9176 gcc_unreachable ();
9178 break;
9179 default:
9180 gcc_unreachable ();
9182 break;
9184 case OMP_CLAUSE_ALIGNED:
9185 decl = OMP_CLAUSE_DECL (c);
9186 if (error_operand_p (decl))
9188 remove = true;
9189 break;
9191 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9192 is_gimple_val, fb_rvalue) == GS_ERROR)
9194 remove = true;
9195 break;
9197 if (!is_global_var (decl)
9198 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9199 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9200 break;
9202 case OMP_CLAUSE_NONTEMPORAL:
9203 decl = OMP_CLAUSE_DECL (c);
9204 if (error_operand_p (decl))
9206 remove = true;
9207 break;
9209 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9210 break;
9212 case OMP_CLAUSE_DEFAULT:
9213 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9214 break;
9216 default:
9217 gcc_unreachable ();
9220 if (code == OACC_DATA
9221 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9222 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9223 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9224 remove = true;
9225 if (remove)
9226 *list_p = OMP_CLAUSE_CHAIN (c);
9227 else
9228 list_p = &OMP_CLAUSE_CHAIN (c);
9231 gimplify_omp_ctxp = ctx;
9232 if (struct_map_to_clause)
9233 delete struct_map_to_clause;
9236 /* Return true if DECL is a candidate for shared to firstprivate
9237 optimization. We only consider non-addressable scalars, not
9238 too big, and not references. */
9240 static bool
9241 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9243 if (TREE_ADDRESSABLE (decl))
9244 return false;
9245 tree type = TREE_TYPE (decl);
9246 if (!is_gimple_reg_type (type)
9247 || TREE_CODE (type) == REFERENCE_TYPE
9248 || TREE_ADDRESSABLE (type))
9249 return false;
9250 /* Don't optimize too large decls, as each thread/task will have
9251 its own. */
9252 HOST_WIDE_INT len = int_size_in_bytes (type);
9253 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9254 return false;
9255 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9256 return false;
9257 return true;
9260 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9261 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9262 GOVD_WRITTEN in outer contexts. */
9264 static void
9265 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9267 for (; ctx; ctx = ctx->outer_context)
9269 splay_tree_node n = splay_tree_lookup (ctx->variables,
9270 (splay_tree_key) decl);
9271 if (n == NULL)
9272 continue;
9273 else if (n->value & GOVD_SHARED)
9275 n->value |= GOVD_WRITTEN;
9276 return;
9278 else if (n->value & GOVD_DATA_SHARE_CLASS)
9279 return;
9283 /* Helper callback for walk_gimple_seq to discover possible stores
9284 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9285 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9286 for those. */
9288 static tree
9289 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9291 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9293 *walk_subtrees = 0;
9294 if (!wi->is_lhs)
9295 return NULL_TREE;
9297 tree op = *tp;
9300 if (handled_component_p (op))
9301 op = TREE_OPERAND (op, 0);
9302 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9303 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9304 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9305 else
9306 break;
9308 while (1);
9309 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9310 return NULL_TREE;
9312 omp_mark_stores (gimplify_omp_ctxp, op);
9313 return NULL_TREE;
9316 /* Helper callback for walk_gimple_seq to discover possible stores
9317 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9318 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9319 for those. */
9321 static tree
9322 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9323 bool *handled_ops_p,
9324 struct walk_stmt_info *wi)
9326 gimple *stmt = gsi_stmt (*gsi_p);
9327 switch (gimple_code (stmt))
9329 /* Don't recurse on OpenMP constructs for which
9330 gimplify_adjust_omp_clauses already handled the bodies,
9331 except handle gimple_omp_for_pre_body. */
9332 case GIMPLE_OMP_FOR:
9333 *handled_ops_p = true;
9334 if (gimple_omp_for_pre_body (stmt))
9335 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9336 omp_find_stores_stmt, omp_find_stores_op, wi);
9337 break;
9338 case GIMPLE_OMP_PARALLEL:
9339 case GIMPLE_OMP_TASK:
9340 case GIMPLE_OMP_SECTIONS:
9341 case GIMPLE_OMP_SINGLE:
9342 case GIMPLE_OMP_TARGET:
9343 case GIMPLE_OMP_TEAMS:
9344 case GIMPLE_OMP_CRITICAL:
9345 *handled_ops_p = true;
9346 break;
9347 default:
9348 break;
9350 return NULL_TREE;
9353 struct gimplify_adjust_omp_clauses_data
9355 tree *list_p;
9356 gimple_seq *pre_p;
9359 /* For all variables that were not actually used within the context,
9360 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9362 static int
9363 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9365 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9366 gimple_seq *pre_p
9367 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9368 tree decl = (tree) n->key;
9369 unsigned flags = n->value;
9370 enum omp_clause_code code;
9371 tree clause;
9372 bool private_debug;
9374 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9375 return 0;
9376 if ((flags & GOVD_SEEN) == 0)
9377 return 0;
9378 if (flags & GOVD_DEBUG_PRIVATE)
9380 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9381 private_debug = true;
9383 else if (flags & GOVD_MAP)
9384 private_debug = false;
9385 else
9386 private_debug
9387 = lang_hooks.decls.omp_private_debug_clause (decl,
9388 !!(flags & GOVD_SHARED));
9389 if (private_debug)
9390 code = OMP_CLAUSE_PRIVATE;
9391 else if (flags & GOVD_MAP)
9393 code = OMP_CLAUSE_MAP;
9394 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9395 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9397 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9398 return 0;
9401 else if (flags & GOVD_SHARED)
9403 if (is_global_var (decl))
9405 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9406 while (ctx != NULL)
9408 splay_tree_node on
9409 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9410 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9411 | GOVD_PRIVATE | GOVD_REDUCTION
9412 | GOVD_LINEAR | GOVD_MAP)) != 0)
9413 break;
9414 ctx = ctx->outer_context;
9416 if (ctx == NULL)
9417 return 0;
9419 code = OMP_CLAUSE_SHARED;
9421 else if (flags & GOVD_PRIVATE)
9422 code = OMP_CLAUSE_PRIVATE;
9423 else if (flags & GOVD_FIRSTPRIVATE)
9425 code = OMP_CLAUSE_FIRSTPRIVATE;
9426 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9427 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9428 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9430 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9431 "%<target%> construct", decl);
9432 return 0;
9435 else if (flags & GOVD_LASTPRIVATE)
9436 code = OMP_CLAUSE_LASTPRIVATE;
9437 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9438 return 0;
9439 else
9440 gcc_unreachable ();
9442 if (((flags & GOVD_LASTPRIVATE)
9443 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9444 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9445 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9447 tree chain = *list_p;
9448 clause = build_omp_clause (input_location, code);
9449 OMP_CLAUSE_DECL (clause) = decl;
9450 OMP_CLAUSE_CHAIN (clause) = chain;
9451 if (private_debug)
9452 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9453 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9454 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9455 else if (code == OMP_CLAUSE_SHARED
9456 && (flags & GOVD_WRITTEN) == 0
9457 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9458 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9459 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9460 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9461 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9463 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9464 OMP_CLAUSE_DECL (nc) = decl;
9465 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9466 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9467 OMP_CLAUSE_DECL (clause)
9468 = build_simple_mem_ref_loc (input_location, decl);
9469 OMP_CLAUSE_DECL (clause)
9470 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9471 build_int_cst (build_pointer_type (char_type_node), 0));
9472 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9473 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9474 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9475 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9476 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9477 OMP_CLAUSE_CHAIN (nc) = chain;
9478 OMP_CLAUSE_CHAIN (clause) = nc;
9479 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9480 gimplify_omp_ctxp = ctx->outer_context;
9481 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9482 pre_p, NULL, is_gimple_val, fb_rvalue);
9483 gimplify_omp_ctxp = ctx;
9485 else if (code == OMP_CLAUSE_MAP)
9487 int kind;
9488 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9489 switch (flags & (GOVD_MAP_TO_ONLY
9490 | GOVD_MAP_FORCE
9491 | GOVD_MAP_FORCE_PRESENT
9492 | GOVD_MAP_ALLOC_ONLY
9493 | GOVD_MAP_FROM_ONLY))
9495 case 0:
9496 kind = GOMP_MAP_TOFROM;
9497 break;
9498 case GOVD_MAP_FORCE:
9499 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
9500 break;
9501 case GOVD_MAP_TO_ONLY:
9502 kind = GOMP_MAP_TO;
9503 break;
9504 case GOVD_MAP_FROM_ONLY:
9505 kind = GOMP_MAP_FROM;
9506 break;
9507 case GOVD_MAP_ALLOC_ONLY:
9508 kind = GOMP_MAP_ALLOC;
9509 break;
9510 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
9511 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
9512 break;
9513 case GOVD_MAP_FORCE_PRESENT:
9514 kind = GOMP_MAP_FORCE_PRESENT;
9515 break;
9516 default:
9517 gcc_unreachable ();
9519 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
9520 if (DECL_SIZE (decl)
9521 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9523 tree decl2 = DECL_VALUE_EXPR (decl);
9524 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9525 decl2 = TREE_OPERAND (decl2, 0);
9526 gcc_assert (DECL_P (decl2));
9527 tree mem = build_simple_mem_ref (decl2);
9528 OMP_CLAUSE_DECL (clause) = mem;
9529 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9530 if (gimplify_omp_ctxp->outer_context)
9532 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9533 omp_notice_variable (ctx, decl2, true);
9534 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
9536 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9537 OMP_CLAUSE_MAP);
9538 OMP_CLAUSE_DECL (nc) = decl;
9539 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9540 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
9541 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9542 else
9543 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9544 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9545 OMP_CLAUSE_CHAIN (clause) = nc;
9547 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9548 && lang_hooks.decls.omp_privatize_by_reference (decl))
9550 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
9551 OMP_CLAUSE_SIZE (clause)
9552 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
9553 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9554 gimplify_omp_ctxp = ctx->outer_context;
9555 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
9556 pre_p, NULL, is_gimple_val, fb_rvalue);
9557 gimplify_omp_ctxp = ctx;
9558 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9559 OMP_CLAUSE_MAP);
9560 OMP_CLAUSE_DECL (nc) = decl;
9561 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9562 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
9563 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9564 OMP_CLAUSE_CHAIN (clause) = nc;
9566 else
9567 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
9569 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
9571 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
9572 OMP_CLAUSE_DECL (nc) = decl;
9573 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
9574 OMP_CLAUSE_CHAIN (nc) = chain;
9575 OMP_CLAUSE_CHAIN (clause) = nc;
9576 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9577 gimplify_omp_ctxp = ctx->outer_context;
9578 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9579 gimplify_omp_ctxp = ctx;
9581 *list_p = clause;
9582 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9583 gimplify_omp_ctxp = ctx->outer_context;
9584 lang_hooks.decls.omp_finish_clause (clause, pre_p);
9585 if (gimplify_omp_ctxp)
9586 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
9587 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
9588 && DECL_P (OMP_CLAUSE_SIZE (clause)))
9589 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
9590 true);
9591 gimplify_omp_ctxp = ctx;
9592 return 0;
/* Post-gimplification pass over the clause list *LIST_P of the construct
   with tree code CODE: remove clauses whose variable was never seen
   (GOVD_SEEN not set), adjust map kinds and sizes for variable-sized
   decls, add pointer/firstprivate-pointer companion clauses where needed,
   then append clauses for implicit data sharing and pop the current
   gimplify OMP context.  BODY, if non-NULL, is the already-gimplified
   body used to detect which variables are written.  */
9595 static void
9596 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
9597 enum tree_code code)
9599 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9600 tree c, decl;
/* If we have a body and sit inside a parallel/task/teams region, walk the
   body recording stores, so that SHARED clauses of variables never written
   can be marked read-only below (omp_find_stores_*).  */
9602 if (body)
9604 struct gimplify_omp_ctx *octx;
9605 for (octx = ctx; octx; octx = octx->outer_context)
9606 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9607 break;
9608 if (octx)
9610 struct walk_stmt_info wi;
9611 memset (&wi, 0, sizeof (wi));
9612 walk_gimple_seq (body, omp_find_stores_stmt,
9613 omp_find_stores_op, &wi);
/* Main loop: examine each explicit clause and decide whether to keep,
   rewrite, or remove it.  */
9616 while ((c = *list_p) != NULL)
9618 splay_tree_node n;
9619 bool remove = false;
9621 switch (OMP_CLAUSE_CODE (c))
9623 case OMP_CLAUSE_FIRSTPRIVATE:
/* _Atomic vars cannot be firstprivatized on (non-ACC) target regions.  */
9624 if ((ctx->region_type & ORT_TARGET)
9625 && (ctx->region_type & ORT_ACC) == 0
9626 && TYPE_ATOMIC (strip_array_types
9627 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9629 error_at (OMP_CLAUSE_LOCATION (c),
9630 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9631 "%<target%> construct", OMP_CLAUSE_DECL (c));
9632 remove = true;
9633 break;
9635 /* FALLTHRU */
9636 case OMP_CLAUSE_PRIVATE:
9637 case OMP_CLAUSE_SHARED:
9638 case OMP_CLAUSE_LINEAR:
/* Drop the clause unless the variable was actually referenced.  */
9639 decl = OMP_CLAUSE_DECL (c);
9640 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9641 remove = !(n->value & GOVD_SEEN);
9642 if (! remove)
9644 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
9645 if ((n->value & GOVD_DEBUG_PRIVATE)
9646 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9648 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9649 || ((n->value & GOVD_DATA_SHARE_CLASS)
9650 == GOVD_SHARED));
9651 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
9652 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
/* SHARED vars never written in the body can be marked read-only;
   otherwise propagate the store information outward.  */
9654 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9655 && (n->value & GOVD_WRITTEN) == 0
9656 && DECL_P (decl)
9657 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9658 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9659 else if (DECL_P (decl)
9660 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9661 && (n->value & GOVD_WRITTEN) != 0)
9662 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9663 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9664 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9665 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9667 break;
9669 case OMP_CLAUSE_LASTPRIVATE:
9670 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9671 accurately reflect the presence of a FIRSTPRIVATE clause. */
9672 decl = OMP_CLAUSE_DECL (c);
9673 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9674 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9675 = (n->value & GOVD_FIRSTPRIVATE) != 0;
/* firstprivate+lastprivate of the same var is invalid on distribute.  */
9676 if (code == OMP_DISTRIBUTE
9677 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9679 remove = true;
9680 error_at (OMP_CLAUSE_LOCATION (c),
9681 "same variable used in %<firstprivate%> and "
9682 "%<lastprivate%> clauses on %<distribute%> "
9683 "construct");
9685 if (!remove
9686 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9687 && DECL_P (decl)
9688 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9689 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9690 break;
9692 case OMP_CLAUSE_ALIGNED:
9693 decl = OMP_CLAUSE_DECL (c);
9694 if (!is_global_var (decl))
9696 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9697 remove = n == NULL || !(n->value & GOVD_SEEN);
9698 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9700 struct gimplify_omp_ctx *octx;
9701 if (n != NULL
9702 && (n->value & (GOVD_DATA_SHARE_CLASS
9703 & ~GOVD_FIRSTPRIVATE)))
9704 remove = true;
9705 else
9706 for (octx = ctx->outer_context; octx;
9707 octx = octx->outer_context)
9709 n = splay_tree_lookup (octx->variables,
9710 (splay_tree_key) decl);
9711 if (n == NULL)
9712 continue;
9713 if (n->value & GOVD_LOCAL)
9714 break;
9715 /* We have to avoid assigning a shared variable
9716 to itself when trying to add
9717 __builtin_assume_aligned. */
9718 if (n->value & GOVD_SHARED)
9720 remove = true;
9721 break;
9726 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9728 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9729 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9730 remove = true;
9732 break;
9734 case OMP_CLAUSE_NONTEMPORAL:
9735 decl = OMP_CLAUSE_DECL (c);
9736 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9737 remove = n == NULL || !(n->value & GOVD_SEEN);
9738 break;
9740 case OMP_CLAUSE_MAP:
/* ALWAYS_POINTER maps are meaningless on target exit data.  */
9741 if (code == OMP_TARGET_EXIT_DATA
9742 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9744 remove = true;
9745 break;
9747 decl = OMP_CLAUSE_DECL (c);
9748 /* Data clauses associated with acc parallel reductions must be
9749 compatible with present_or_copy. Warn and adjust the clause
9750 if that is not the case. */
9751 if (ctx->region_type == ORT_ACC_PARALLEL)
9753 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9754 n = NULL;
9756 if (DECL_P (t))
9757 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9759 if (n && (n->value & GOVD_REDUCTION))
9761 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9763 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9764 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9765 && kind != GOMP_MAP_FORCE_PRESENT
9766 && kind != GOMP_MAP_POINTER)
9768 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9769 "incompatible data clause with reduction "
9770 "on %qE; promoting to present_or_copy",
9771 DECL_NAME (t));
9772 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
/* Non-DECL map operands: for firstprivate-pointer maps of component
   references on targets, strip to the base decl and drop the clause
   if that base was never seen.  */
9776 if (!DECL_P (decl))
9778 if ((ctx->region_type & ORT_TARGET) != 0
9779 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9781 if (TREE_CODE (decl) == INDIRECT_REF
9782 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9783 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9784 == REFERENCE_TYPE))
9785 decl = TREE_OPERAND (decl, 0);
9786 if (TREE_CODE (decl) == COMPONENT_REF)
9788 while (TREE_CODE (decl) == COMPONENT_REF)
9789 decl = TREE_OPERAND (decl, 0);
9790 if (DECL_P (decl))
9792 n = splay_tree_lookup (ctx->variables,
9793 (splay_tree_key) decl);
9794 if (!(n->value & GOVD_SEEN))
9795 remove = true;
9799 break;
9801 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9802 if ((ctx->region_type & ORT_TARGET) != 0
9803 && !(n->value & GOVD_SEEN)
9804 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9805 && (!is_global_var (decl)
9806 || !lookup_attribute ("omp declare target link",
9807 DECL_ATTRIBUTES (decl))))
9809 remove = true;
9810 /* For struct element mapping, if struct is never referenced
9811 in target block and none of the mapping has always modifier,
9812 remove all the struct element mappings, which immediately
9813 follow the GOMP_MAP_STRUCT map clause. */
9814 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9816 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9817 while (cnt--)
9818 OMP_CLAUSE_CHAIN (c)
9819 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9822 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9823 && code == OMP_TARGET_EXIT_DATA)
9824 remove = true;
/* Variable-sized decl: replace the decl operand with a MEM_REF of its
   DECL_VALUE_EXPR base and add a companion pointer map clause.  */
9825 else if (DECL_SIZE (decl)
9826 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9827 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9828 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9829 && (OMP_CLAUSE_MAP_KIND (c)
9830 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9832 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9833 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9834 INTEGER_CST. */
9835 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9837 tree decl2 = DECL_VALUE_EXPR (decl);
9838 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9839 decl2 = TREE_OPERAND (decl2, 0);
9840 gcc_assert (DECL_P (decl2));
9841 tree mem = build_simple_mem_ref (decl2);
9842 OMP_CLAUSE_DECL (c) = mem;
9843 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9844 if (ctx->outer_context)
9846 omp_notice_variable (ctx->outer_context, decl2, true);
9847 omp_notice_variable (ctx->outer_context,
9848 OMP_CLAUSE_SIZE (c), true);
9850 if (((ctx->region_type & ORT_TARGET) != 0
9851 || !ctx->target_firstprivatize_array_bases)
9852 && ((n->value & GOVD_SEEN) == 0
9853 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9855 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9856 OMP_CLAUSE_MAP);
9857 OMP_CLAUSE_DECL (nc) = decl;
9858 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9859 if (ctx->target_firstprivatize_array_bases)
9860 OMP_CLAUSE_SET_MAP_KIND (nc,
9861 GOMP_MAP_FIRSTPRIVATE_POINTER);
9862 else
9863 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9864 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9865 OMP_CLAUSE_CHAIN (c) = nc;
9866 c = nc;
9869 else
9871 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9872 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9873 gcc_assert ((n->value & GOVD_SEEN) == 0
9874 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9875 == 0));
9877 break;
9879 case OMP_CLAUSE_TO:
9880 case OMP_CLAUSE_FROM:
9881 case OMP_CLAUSE__CACHE_:
/* Same variable-sized-decl rewrite as for MAP, but no companion
   pointer clause is needed for to/from/_cache_.  */
9882 decl = OMP_CLAUSE_DECL (c);
9883 if (!DECL_P (decl))
9884 break;
9885 if (DECL_SIZE (decl)
9886 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9888 tree decl2 = DECL_VALUE_EXPR (decl);
9889 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9890 decl2 = TREE_OPERAND (decl2, 0);
9891 gcc_assert (DECL_P (decl2));
9892 tree mem = build_simple_mem_ref (decl2);
9893 OMP_CLAUSE_DECL (c) = mem;
9894 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9895 if (ctx->outer_context)
9897 omp_notice_variable (ctx->outer_context, decl2, true);
9898 omp_notice_variable (ctx->outer_context,
9899 OMP_CLAUSE_SIZE (c), true);
9902 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9903 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9904 break;
9906 case OMP_CLAUSE_REDUCTION:
9907 case OMP_CLAUSE_IN_REDUCTION:
9908 case OMP_CLAUSE_TASK_REDUCTION:
9909 decl = OMP_CLAUSE_DECL (c);
9910 /* OpenACC reductions need a present_or_copy data clause.
9911 Add one if necessary. Emit error when the reduction is private. */
9912 if (ctx->region_type == ORT_ACC_PARALLEL)
9914 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9915 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9917 remove = true;
9918 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9919 "reduction on %qE", DECL_NAME (decl));
9921 else if ((n->value & GOVD_MAP) == 0)
9923 tree next = OMP_CLAUSE_CHAIN (c);
9924 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9925 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9926 OMP_CLAUSE_DECL (nc) = decl;
9927 OMP_CLAUSE_CHAIN (c) = nc;
9928 lang_hooks.decls.omp_finish_clause (nc, pre_p);
/* omp_finish_clause may have chained further clauses onto nc;
   flag them all as in-reduction before relinking the tail.  */
9929 while (1)
9931 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9932 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9933 break;
9934 nc = OMP_CLAUSE_CHAIN (nc);
9936 OMP_CLAUSE_CHAIN (nc) = next;
9937 n->value |= GOVD_MAP;
9940 if (DECL_P (decl)
9941 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9942 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9943 break;
/* The remaining clauses need no post-gimplification adjustment.  */
9944 case OMP_CLAUSE_COPYIN:
9945 case OMP_CLAUSE_COPYPRIVATE:
9946 case OMP_CLAUSE_IF:
9947 case OMP_CLAUSE_NUM_THREADS:
9948 case OMP_CLAUSE_NUM_TEAMS:
9949 case OMP_CLAUSE_THREAD_LIMIT:
9950 case OMP_CLAUSE_DIST_SCHEDULE:
9951 case OMP_CLAUSE_DEVICE:
9952 case OMP_CLAUSE_SCHEDULE:
9953 case OMP_CLAUSE_NOWAIT:
9954 case OMP_CLAUSE_ORDERED:
9955 case OMP_CLAUSE_DEFAULT:
9956 case OMP_CLAUSE_UNTIED:
9957 case OMP_CLAUSE_COLLAPSE:
9958 case OMP_CLAUSE_FINAL:
9959 case OMP_CLAUSE_MERGEABLE:
9960 case OMP_CLAUSE_PROC_BIND:
9961 case OMP_CLAUSE_SAFELEN:
9962 case OMP_CLAUSE_SIMDLEN:
9963 case OMP_CLAUSE_DEPEND:
9964 case OMP_CLAUSE_PRIORITY:
9965 case OMP_CLAUSE_GRAINSIZE:
9966 case OMP_CLAUSE_NUM_TASKS:
9967 case OMP_CLAUSE_NOGROUP:
9968 case OMP_CLAUSE_THREADS:
9969 case OMP_CLAUSE_SIMD:
9970 case OMP_CLAUSE_HINT:
9971 case OMP_CLAUSE_DEFAULTMAP:
9972 case OMP_CLAUSE_USE_DEVICE_PTR:
9973 case OMP_CLAUSE_IS_DEVICE_PTR:
9974 case OMP_CLAUSE_ASYNC:
9975 case OMP_CLAUSE_WAIT:
9976 case OMP_CLAUSE_INDEPENDENT:
9977 case OMP_CLAUSE_NUM_GANGS:
9978 case OMP_CLAUSE_NUM_WORKERS:
9979 case OMP_CLAUSE_VECTOR_LENGTH:
9980 case OMP_CLAUSE_GANG:
9981 case OMP_CLAUSE_WORKER:
9982 case OMP_CLAUSE_VECTOR:
9983 case OMP_CLAUSE_AUTO:
9984 case OMP_CLAUSE_SEQ:
9985 case OMP_CLAUSE_TILE:
9986 case OMP_CLAUSE_IF_PRESENT:
9987 case OMP_CLAUSE_FINALIZE:
9988 break;
9990 default:
9991 gcc_unreachable ();
/* Unlink the clause if flagged for removal, otherwise advance.  */
9994 if (remove)
9995 *list_p = OMP_CLAUSE_CHAIN (c);
9996 else
9997 list_p = &OMP_CLAUSE_CHAIN (c);
10000 /* Add in any implicit data sharing. */
10001 struct gimplify_adjust_omp_clauses_data data;
10002 data.list_p = list_p;
10003 data.pre_p = pre_p;
10004 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
/* Pop this OMP context now that its clauses are final.  */
10006 gimplify_omp_ctxp = ctx->outer_context;
10007 delete_omp_context (ctx);
10010 /* Gimplify OACC_CACHE. */
10012 static void
10013 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10015 tree expr = *expr_p;
10017 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10018 OACC_CACHE);
10019 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10020 OACC_CACHE);
10022 /* TODO: Do something sensible with this information. */
10024 *expr_p = NULL_TREE;
10027 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10028 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10029 kind. The entry kind will replace the one in CLAUSE, while the exit
10030 kind will be used in a new omp_clause and returned to the caller. */
10032 static tree
10033 gimplify_oacc_declare_1 (tree clause)
10035 HOST_WIDE_INT kind, new_op;
10036 bool ret = false;
10037 tree c = NULL;
10039 kind = OMP_CLAUSE_MAP_KIND (clause);
10041 switch (kind)
10043 case GOMP_MAP_ALLOC:
10044 new_op = GOMP_MAP_RELEASE;
10045 ret = true;
10046 break;
10048 case GOMP_MAP_FROM:
10049 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10050 new_op = GOMP_MAP_FROM;
10051 ret = true;
10052 break;
10054 case GOMP_MAP_TOFROM:
10055 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10056 new_op = GOMP_MAP_FROM;
10057 ret = true;
10058 break;
10060 case GOMP_MAP_DEVICE_RESIDENT:
10061 case GOMP_MAP_FORCE_DEVICEPTR:
10062 case GOMP_MAP_FORCE_PRESENT:
10063 case GOMP_MAP_LINK:
10064 case GOMP_MAP_POINTER:
10065 case GOMP_MAP_TO:
10066 break;
10068 default:
10069 gcc_unreachable ();
10070 break;
10073 if (ret)
10075 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10076 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10077 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10080 return c;
10083 /* Gimplify OACC_DECLARE. */
10085 static void
10086 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10088 tree expr = *expr_p;
10089 gomp_target *stmt;
10090 tree clauses, t, decl;
10092 clauses = OACC_DECLARE_CLAUSES (expr);
10094 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10095 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10097 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10099 decl = OMP_CLAUSE_DECL (t);
10101 if (TREE_CODE (decl) == MEM_REF)
10102 decl = TREE_OPERAND (decl, 0);
10104 if (VAR_P (decl) && !is_oacc_declared (decl))
10106 tree attr = get_identifier ("oacc declare target");
10107 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10108 DECL_ATTRIBUTES (decl));
10111 if (VAR_P (decl)
10112 && !is_global_var (decl)
10113 && DECL_CONTEXT (decl) == current_function_decl)
10115 tree c = gimplify_oacc_declare_1 (t);
10116 if (c)
10118 if (oacc_declare_returns == NULL)
10119 oacc_declare_returns = new hash_map<tree, tree>;
10121 oacc_declare_returns->put (decl, c);
10125 if (gimplify_omp_ctxp)
10126 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10129 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10130 clauses);
10132 gimplify_seq_add_stmt (pre_p, stmt);
10134 *expr_p = NULL_TREE;
10137 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10138 gimplification of the body, as well as scanning the body for used
10139 variables. We need to do this scan now, because variable-sized
10140 decls will be decomposed during gimplification. */
10142 static void
10143 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10145 tree expr = *expr_p;
10146 gimple *g;
10147 gimple_seq body = NULL;
10149 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10150 OMP_PARALLEL_COMBINED (expr)
10151 ? ORT_COMBINED_PARALLEL
10152 : ORT_PARALLEL, OMP_PARALLEL);
10154 push_gimplify_context ();
10156 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10157 if (gimple_code (g) == GIMPLE_BIND)
10158 pop_gimplify_context (g);
10159 else
10160 pop_gimplify_context (NULL);
10162 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10163 OMP_PARALLEL);
10165 g = gimple_build_omp_parallel (body,
10166 OMP_PARALLEL_CLAUSES (expr),
10167 NULL_TREE, NULL_TREE);
10168 if (OMP_PARALLEL_COMBINED (expr))
10169 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10170 gimplify_seq_add_stmt (pre_p, g);
10171 *expr_p = NULL_TREE;
10174 /* Gimplify the contents of an OMP_TASK statement. This involves
10175 gimplification of the body, as well as scanning the body for used
10176 variables. We need to do this scan now, because variable-sized
10177 decls will be decomposed during gimplification. */
10179 static void
10180 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10182 tree expr = *expr_p;
10183 gimple *g;
10184 gimple_seq body = NULL;
10186 if (OMP_TASK_BODY (expr) == NULL_TREE)
10187 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10188 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10189 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10191 error_at (OMP_CLAUSE_LOCATION (c),
10192 "%<mutexinoutset%> kind in %<depend%> clause on a "
10193 "%<taskwait%> construct");
10194 break;
10197 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10198 omp_find_clause (OMP_TASK_CLAUSES (expr),
10199 OMP_CLAUSE_UNTIED)
10200 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10202 if (OMP_TASK_BODY (expr))
10204 push_gimplify_context ();
10206 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10207 if (gimple_code (g) == GIMPLE_BIND)
10208 pop_gimplify_context (g);
10209 else
10210 pop_gimplify_context (NULL);
10213 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10214 OMP_TASK);
10216 g = gimple_build_omp_task (body,
10217 OMP_TASK_CLAUSES (expr),
10218 NULL_TREE, NULL_TREE,
10219 NULL_TREE, NULL_TREE, NULL_TREE);
10220 if (OMP_TASK_BODY (expr) == NULL_TREE)
10221 gimple_omp_task_set_taskwait_p (g, true);
10222 gimplify_seq_add_stmt (pre_p, g);
10223 *expr_p = NULL_TREE;
10226 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
10227 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10228 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10229 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10230 OMP_FOR in between if any and pdata[3] is address of the inner
10231 OMP_FOR/OMP_SIMD. */
10233 static tree
10234 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10236 tree **pdata = (tree **) data;
10237 *walk_subtrees = 0;
10238 switch (TREE_CODE (*tp))
10240 case OMP_FOR:
10241 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10243 pdata[3] = tp;
10244 return *tp;
10246 pdata[2] = tp;
10247 *walk_subtrees = 1;
10248 break;
10249 case OMP_SIMD:
10250 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10252 pdata[3] = tp;
10253 return *tp;
10255 break;
10256 case BIND_EXPR:
10257 if (BIND_EXPR_VARS (*tp)
10258 || (BIND_EXPR_BLOCK (*tp)
10259 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10260 pdata[0] = tp;
10261 *walk_subtrees = 1;
10262 break;
10263 case STATEMENT_LIST:
10264 if (!tsi_one_before_end_p (tsi_start (*tp)))
10265 pdata[0] = tp;
10266 *walk_subtrees = 1;
10267 break;
10268 case TRY_FINALLY_EXPR:
10269 pdata[0] = tp;
10270 *walk_subtrees = 1;
10271 break;
10272 case OMP_PARALLEL:
10273 pdata[1] = tp;
10274 *walk_subtrees = 1;
10275 break;
10276 default:
10277 break;
10279 return NULL_TREE;
10282 /* Gimplify the gross structure of an OMP_FOR statement. */
10284 static enum gimplify_status
10285 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10287 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10288 enum gimplify_status ret = GS_ALL_DONE;
10289 enum gimplify_status tret;
10290 gomp_for *gfor;
10291 gimple_seq for_body, for_pre_body;
10292 int i;
10293 bitmap has_decl_expr = NULL;
10294 enum omp_region_type ort = ORT_WORKSHARE;
10296 orig_for_stmt = for_stmt = *expr_p;
10298 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10300 tree *data[4] = { NULL, NULL, NULL, NULL };
10301 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10302 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10303 find_combined_omp_for, data, NULL);
10304 if (inner_for_stmt == NULL_TREE)
10306 gcc_assert (seen_error ());
10307 *expr_p = NULL_TREE;
10308 return GS_ERROR;
10310 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10312 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10313 &OMP_FOR_PRE_BODY (for_stmt));
10314 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10316 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10318 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10319 &OMP_FOR_PRE_BODY (for_stmt));
10320 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10323 if (data[0])
10325 /* We have some statements or variable declarations in between
10326 the composite construct directives. Move them around the
10327 inner_for_stmt. */
10328 data[0] = expr_p;
10329 for (i = 0; i < 3; i++)
10330 if (data[i])
10332 tree t = *data[i];
10333 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10334 data[i + 1] = data[i];
10335 *data[i] = OMP_BODY (t);
10336 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10337 NULL_TREE, make_node (BLOCK));
10338 OMP_BODY (t) = body;
10339 append_to_statement_list_force (inner_for_stmt,
10340 &BIND_EXPR_BODY (body));
10341 *data[3] = t;
10342 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10343 gcc_assert (*data[3] == inner_for_stmt);
10345 return GS_OK;
10348 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10349 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10350 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10351 i)) == TREE_LIST
10352 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10353 i)))
10355 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10356 /* Class iterators aren't allowed on OMP_SIMD, so the only
10357 case we need to solve is distribute parallel for. */
10358 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10359 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10360 && data[1]);
10361 tree orig_decl = TREE_PURPOSE (orig);
10362 tree last = TREE_VALUE (orig);
10363 tree *pc;
10364 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10365 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10366 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10367 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10368 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10369 break;
10370 if (*pc == NULL_TREE)
10372 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10374 /* private clause will appear only on inner_for_stmt.
10375 Change it into firstprivate, and add private clause
10376 on for_stmt. */
10377 tree c = copy_node (*pc);
10378 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10379 OMP_FOR_CLAUSES (for_stmt) = c;
10380 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10381 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10383 else
10385 /* lastprivate clause will appear on both inner_for_stmt
10386 and for_stmt. Add firstprivate clause to
10387 inner_for_stmt. */
10388 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10389 OMP_CLAUSE_FIRSTPRIVATE);
10390 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10391 OMP_CLAUSE_CHAIN (c) = *pc;
10392 *pc = c;
10393 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10395 tree c = build_omp_clause (UNKNOWN_LOCATION,
10396 OMP_CLAUSE_FIRSTPRIVATE);
10397 OMP_CLAUSE_DECL (c) = last;
10398 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10399 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10400 c = build_omp_clause (UNKNOWN_LOCATION,
10401 *pc ? OMP_CLAUSE_SHARED
10402 : OMP_CLAUSE_FIRSTPRIVATE);
10403 OMP_CLAUSE_DECL (c) = orig_decl;
10404 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10405 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10407 /* Similarly, take care of C++ range for temporaries, those should
10408 be firstprivate on OMP_PARALLEL if any. */
10409 if (data[1])
10410 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10411 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10412 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10413 i)) == TREE_LIST
10414 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10415 i)))
10417 tree orig
10418 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10419 tree v = TREE_CHAIN (orig);
10420 tree c = build_omp_clause (UNKNOWN_LOCATION,
10421 OMP_CLAUSE_FIRSTPRIVATE);
10422 /* First add firstprivate clause for the __for_end artificial
10423 decl. */
10424 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10425 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10426 == REFERENCE_TYPE)
10427 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10428 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10429 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10430 if (TREE_VEC_ELT (v, 0))
10432 /* And now the same for __for_range artificial decl if it
10433 exists. */
10434 c = build_omp_clause (UNKNOWN_LOCATION,
10435 OMP_CLAUSE_FIRSTPRIVATE);
10436 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10437 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10438 == REFERENCE_TYPE)
10439 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10440 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10441 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10446 switch (TREE_CODE (for_stmt))
10448 case OMP_FOR:
10449 case OMP_DISTRIBUTE:
10450 break;
10451 case OACC_LOOP:
10452 ort = ORT_ACC;
10453 break;
10454 case OMP_TASKLOOP:
10455 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
10456 ort = ORT_UNTIED_TASKLOOP;
10457 else
10458 ort = ORT_TASKLOOP;
10459 break;
10460 case OMP_SIMD:
10461 ort = ORT_SIMD;
10462 break;
10463 default:
10464 gcc_unreachable ();
10467 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10468 clause for the IV. */
10469 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10471 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10472 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10473 decl = TREE_OPERAND (t, 0);
10474 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10475 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10476 && OMP_CLAUSE_DECL (c) == decl)
10478 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10479 break;
10483 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10484 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10485 TREE_CODE (for_stmt));
10487 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10488 gimplify_omp_ctxp->distribute = true;
10490 /* Handle OMP_FOR_INIT. */
10491 for_pre_body = NULL;
10492 if ((ort == ORT_SIMD
10493 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10494 && OMP_FOR_PRE_BODY (for_stmt))
10496 has_decl_expr = BITMAP_ALLOC (NULL);
10497 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10498 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
10499 == VAR_DECL)
10501 t = OMP_FOR_PRE_BODY (for_stmt);
10502 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10504 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10506 tree_stmt_iterator si;
10507 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10508 tsi_next (&si))
10510 t = tsi_stmt (si);
10511 if (TREE_CODE (t) == DECL_EXPR
10512 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10513 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10517 if (OMP_FOR_PRE_BODY (for_stmt))
10519 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10520 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10521 else
10523 struct gimplify_omp_ctx ctx;
10524 memset (&ctx, 0, sizeof (ctx));
10525 ctx.region_type = ORT_NONE;
10526 gimplify_omp_ctxp = &ctx;
10527 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10528 gimplify_omp_ctxp = NULL;
10531 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
10533 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10534 for_stmt = inner_for_stmt;
10536 /* For taskloop, need to gimplify the start, end and step before the
10537 taskloop, outside of the taskloop omp context. */
10538 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10540 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10542 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10543 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10545 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10546 TREE_OPERAND (t, 1)
10547 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10548 gimple_seq_empty_p (for_pre_body)
10549 ? pre_p : &for_pre_body, NULL,
10550 false);
10551 /* Reference to pointer conversion is considered useless,
10552 but is significant for firstprivate clause. Force it
10553 here. */
10554 if (TREE_CODE (type) == POINTER_TYPE
10555 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10556 == REFERENCE_TYPE))
10558 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10559 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10560 TREE_OPERAND (t, 1));
10561 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10562 ? pre_p : &for_pre_body);
10563 TREE_OPERAND (t, 1) = v;
10565 tree c = build_omp_clause (input_location,
10566 OMP_CLAUSE_FIRSTPRIVATE);
10567 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10568 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10569 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10572 /* Handle OMP_FOR_COND. */
10573 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10574 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10576 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10577 TREE_OPERAND (t, 1)
10578 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10579 gimple_seq_empty_p (for_pre_body)
10580 ? pre_p : &for_pre_body, NULL,
10581 false);
10582 /* Reference to pointer conversion is considered useless,
10583 but is significant for firstprivate clause. Force it
10584 here. */
10585 if (TREE_CODE (type) == POINTER_TYPE
10586 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10587 == REFERENCE_TYPE))
10589 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10590 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10591 TREE_OPERAND (t, 1));
10592 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10593 ? pre_p : &for_pre_body);
10594 TREE_OPERAND (t, 1) = v;
10596 tree c = build_omp_clause (input_location,
10597 OMP_CLAUSE_FIRSTPRIVATE);
10598 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10599 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10600 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10603 /* Handle OMP_FOR_INCR. */
10604 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10605 if (TREE_CODE (t) == MODIFY_EXPR)
10607 decl = TREE_OPERAND (t, 0);
10608 t = TREE_OPERAND (t, 1);
10609 tree *tp = &TREE_OPERAND (t, 1);
10610 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10611 tp = &TREE_OPERAND (t, 0);
10613 if (!is_gimple_constant (*tp))
10615 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10616 ? pre_p : &for_pre_body;
10617 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
10618 tree c = build_omp_clause (input_location,
10619 OMP_CLAUSE_FIRSTPRIVATE);
10620 OMP_CLAUSE_DECL (c) = *tp;
10621 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10622 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10627 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10628 OMP_TASKLOOP);
10631 if (orig_for_stmt != for_stmt)
10632 gimplify_omp_ctxp->combined_loop = true;
10634 for_body = NULL;
10635 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10636 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10637 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10638 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
10640 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
10641 bool is_doacross = false;
10642 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10644 is_doacross = true;
10645 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10646 (OMP_FOR_INIT (for_stmt))
10647 * 2);
10649 int collapse = 1, tile = 0;
10650 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
10651 if (c)
10652 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10653 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10654 if (c)
10655 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
10656 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10658 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10659 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10660 decl = TREE_OPERAND (t, 0);
10661 gcc_assert (DECL_P (decl));
10662 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10663 || POINTER_TYPE_P (TREE_TYPE (decl)));
10664 if (is_doacross)
10666 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
10668 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
10669 if (TREE_CODE (orig_decl) == TREE_LIST)
10671 orig_decl = TREE_PURPOSE (orig_decl);
10672 if (!orig_decl)
10673 orig_decl = decl;
10675 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
10677 else
10678 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10679 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10682 /* Make sure the iteration variable is private. */
10683 tree c = NULL_TREE;
10684 tree c2 = NULL_TREE;
10685 if (orig_for_stmt != for_stmt)
10687 /* Preserve this information until we gimplify the inner simd. */
10688 if (has_decl_expr
10689 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10690 TREE_PRIVATE (t) = 1;
10692 else if (ort == ORT_SIMD)
10694 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10695 (splay_tree_key) decl);
10696 omp_is_private (gimplify_omp_ctxp, decl,
10697 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10698 != 1));
10699 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10700 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10701 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10703 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10704 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10705 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
10706 if ((has_decl_expr
10707 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10708 || TREE_PRIVATE (t))
10710 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10711 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10713 struct gimplify_omp_ctx *outer
10714 = gimplify_omp_ctxp->outer_context;
10715 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10717 if (outer->region_type == ORT_WORKSHARE
10718 && outer->combined_loop)
10720 n = splay_tree_lookup (outer->variables,
10721 (splay_tree_key)decl);
10722 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10724 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10725 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10727 else
10729 struct gimplify_omp_ctx *octx = outer->outer_context;
10730 if (octx
10731 && octx->region_type == ORT_COMBINED_PARALLEL
10732 && octx->outer_context
10733 && (octx->outer_context->region_type
10734 == ORT_WORKSHARE)
10735 && octx->outer_context->combined_loop)
10737 octx = octx->outer_context;
10738 n = splay_tree_lookup (octx->variables,
10739 (splay_tree_key)decl);
10740 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10742 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10743 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10750 OMP_CLAUSE_DECL (c) = decl;
10751 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10752 OMP_FOR_CLAUSES (for_stmt) = c;
10753 omp_add_variable (gimplify_omp_ctxp, decl, flags);
10754 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10756 if (outer->region_type == ORT_WORKSHARE
10757 && outer->combined_loop)
10759 if (outer->outer_context
10760 && (outer->outer_context->region_type
10761 == ORT_COMBINED_PARALLEL))
10762 outer = outer->outer_context;
10763 else if (omp_check_private (outer, decl, false))
10764 outer = NULL;
10766 else if (((outer->region_type & ORT_TASKLOOP)
10767 == ORT_TASKLOOP)
10768 && outer->combined_loop
10769 && !omp_check_private (gimplify_omp_ctxp,
10770 decl, false))
10772 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10774 omp_notice_variable (outer, decl, true);
10775 outer = NULL;
10777 if (outer)
10779 n = splay_tree_lookup (outer->variables,
10780 (splay_tree_key)decl);
10781 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10783 omp_add_variable (outer, decl,
10784 GOVD_LASTPRIVATE | GOVD_SEEN);
10785 if (outer->region_type == ORT_COMBINED_PARALLEL
10786 && outer->outer_context
10787 && (outer->outer_context->region_type
10788 == ORT_WORKSHARE)
10789 && outer->outer_context->combined_loop)
10791 outer = outer->outer_context;
10792 n = splay_tree_lookup (outer->variables,
10793 (splay_tree_key)decl);
10794 if (omp_check_private (outer, decl, false))
10795 outer = NULL;
10796 else if (n == NULL
10797 || ((n->value & GOVD_DATA_SHARE_CLASS)
10798 == 0))
10799 omp_add_variable (outer, decl,
10800 GOVD_LASTPRIVATE
10801 | GOVD_SEEN);
10802 else
10803 outer = NULL;
10805 if (outer && outer->outer_context
10806 && ((outer->outer_context->region_type
10807 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10808 || (((outer->region_type & ORT_TASKLOOP)
10809 == ORT_TASKLOOP)
10810 && (outer->outer_context->region_type
10811 == ORT_COMBINED_PARALLEL))))
10813 outer = outer->outer_context;
10814 n = splay_tree_lookup (outer->variables,
10815 (splay_tree_key)decl);
10816 if (n == NULL
10817 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10818 omp_add_variable (outer, decl,
10819 GOVD_SHARED | GOVD_SEEN);
10820 else
10821 outer = NULL;
10823 if (outer && outer->outer_context)
10824 omp_notice_variable (outer->outer_context, decl,
10825 true);
10830 else
10832 bool lastprivate
10833 = (!has_decl_expr
10834 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
10835 if (TREE_PRIVATE (t))
10836 lastprivate = false;
10837 struct gimplify_omp_ctx *outer
10838 = gimplify_omp_ctxp->outer_context;
10839 if (outer && lastprivate)
10841 if (outer->region_type == ORT_WORKSHARE
10842 && outer->combined_loop)
10844 n = splay_tree_lookup (outer->variables,
10845 (splay_tree_key)decl);
10846 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10848 lastprivate = false;
10849 outer = NULL;
10851 else if (outer->outer_context
10852 && (outer->outer_context->region_type
10853 == ORT_COMBINED_PARALLEL))
10854 outer = outer->outer_context;
10855 else if (omp_check_private (outer, decl, false))
10856 outer = NULL;
10858 else if (((outer->region_type & ORT_TASKLOOP)
10859 == ORT_TASKLOOP)
10860 && outer->combined_loop
10861 && !omp_check_private (gimplify_omp_ctxp,
10862 decl, false))
10864 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10866 omp_notice_variable (outer, decl, true);
10867 outer = NULL;
10869 if (outer)
10871 n = splay_tree_lookup (outer->variables,
10872 (splay_tree_key)decl);
10873 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10875 omp_add_variable (outer, decl,
10876 GOVD_LASTPRIVATE | GOVD_SEEN);
10877 if (outer->region_type == ORT_COMBINED_PARALLEL
10878 && outer->outer_context
10879 && (outer->outer_context->region_type
10880 == ORT_WORKSHARE)
10881 && outer->outer_context->combined_loop)
10883 outer = outer->outer_context;
10884 n = splay_tree_lookup (outer->variables,
10885 (splay_tree_key)decl);
10886 if (omp_check_private (outer, decl, false))
10887 outer = NULL;
10888 else if (n == NULL
10889 || ((n->value & GOVD_DATA_SHARE_CLASS)
10890 == 0))
10891 omp_add_variable (outer, decl,
10892 GOVD_LASTPRIVATE
10893 | GOVD_SEEN);
10894 else
10895 outer = NULL;
10897 if (outer && outer->outer_context
10898 && ((outer->outer_context->region_type
10899 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10900 || (((outer->region_type & ORT_TASKLOOP)
10901 == ORT_TASKLOOP)
10902 && (outer->outer_context->region_type
10903 == ORT_COMBINED_PARALLEL))))
10905 outer = outer->outer_context;
10906 n = splay_tree_lookup (outer->variables,
10907 (splay_tree_key)decl);
10908 if (n == NULL
10909 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10910 omp_add_variable (outer, decl,
10911 GOVD_SHARED | GOVD_SEEN);
10912 else
10913 outer = NULL;
10915 if (outer && outer->outer_context)
10916 omp_notice_variable (outer->outer_context, decl,
10917 true);
10922 c = build_omp_clause (input_location,
10923 lastprivate ? OMP_CLAUSE_LASTPRIVATE
10924 : OMP_CLAUSE_PRIVATE);
10925 OMP_CLAUSE_DECL (c) = decl;
10926 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10927 OMP_FOR_CLAUSES (for_stmt) = c;
10928 omp_add_variable (gimplify_omp_ctxp, decl,
10929 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
10930 | GOVD_EXPLICIT | GOVD_SEEN);
10931 c = NULL_TREE;
10934 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
10935 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10936 else
10937 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
10939 /* If DECL is not a gimple register, create a temporary variable to act
10940 as an iteration counter. This is valid, since DECL cannot be
10941 modified in the body of the loop. Similarly for any iteration vars
10942 in simd with collapse > 1 where the iterator vars must be
10943 lastprivate. */
10944 if (orig_for_stmt != for_stmt)
10945 var = decl;
10946 else if (!is_gimple_reg (decl)
10947 || (ort == ORT_SIMD
10948 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
10950 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10951 /* Make sure omp_add_variable is not called on it prematurely.
10952 We call it ourselves a few lines later. */
10953 gimplify_omp_ctxp = NULL;
10954 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10955 gimplify_omp_ctxp = ctx;
10956 TREE_OPERAND (t, 0) = var;
10958 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
10960 if (ort == ORT_SIMD
10961 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10963 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10964 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
10965 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
10966 OMP_CLAUSE_DECL (c2) = var;
10967 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
10968 OMP_FOR_CLAUSES (for_stmt) = c2;
10969 omp_add_variable (gimplify_omp_ctxp, var,
10970 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10971 if (c == NULL_TREE)
10973 c = c2;
10974 c2 = NULL_TREE;
10977 else
10978 omp_add_variable (gimplify_omp_ctxp, var,
10979 GOVD_PRIVATE | GOVD_SEEN);
10981 else
10982 var = decl;
10984 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10985 is_gimple_val, fb_rvalue, false);
10986 ret = MIN (ret, tret);
10987 if (ret == GS_ERROR)
10988 return ret;
10990 /* Handle OMP_FOR_COND. */
10991 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10992 gcc_assert (COMPARISON_CLASS_P (t));
10993 gcc_assert (TREE_OPERAND (t, 0) == decl);
10995 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10996 is_gimple_val, fb_rvalue, false);
10997 ret = MIN (ret, tret);
10999 /* Handle OMP_FOR_INCR. */
11000 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11001 switch (TREE_CODE (t))
11003 case PREINCREMENT_EXPR:
11004 case POSTINCREMENT_EXPR:
11006 tree decl = TREE_OPERAND (t, 0);
11007 /* c_omp_for_incr_canonicalize_ptr() should have been
11008 called to massage things appropriately. */
11009 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11011 if (orig_for_stmt != for_stmt)
11012 break;
11013 t = build_int_cst (TREE_TYPE (decl), 1);
11014 if (c)
11015 OMP_CLAUSE_LINEAR_STEP (c) = t;
11016 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11017 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11018 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11019 break;
11022 case PREDECREMENT_EXPR:
11023 case POSTDECREMENT_EXPR:
11024 /* c_omp_for_incr_canonicalize_ptr() should have been
11025 called to massage things appropriately. */
11026 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11027 if (orig_for_stmt != for_stmt)
11028 break;
11029 t = build_int_cst (TREE_TYPE (decl), -1);
11030 if (c)
11031 OMP_CLAUSE_LINEAR_STEP (c) = t;
11032 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11033 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11034 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11035 break;
11037 case MODIFY_EXPR:
11038 gcc_assert (TREE_OPERAND (t, 0) == decl);
11039 TREE_OPERAND (t, 0) = var;
11041 t = TREE_OPERAND (t, 1);
11042 switch (TREE_CODE (t))
11044 case PLUS_EXPR:
11045 if (TREE_OPERAND (t, 1) == decl)
11047 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11048 TREE_OPERAND (t, 0) = var;
11049 break;
11052 /* Fallthru. */
11053 case MINUS_EXPR:
11054 case POINTER_PLUS_EXPR:
11055 gcc_assert (TREE_OPERAND (t, 0) == decl);
11056 TREE_OPERAND (t, 0) = var;
11057 break;
11058 default:
11059 gcc_unreachable ();
11062 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11063 is_gimple_val, fb_rvalue, false);
11064 ret = MIN (ret, tret);
11065 if (c)
11067 tree step = TREE_OPERAND (t, 1);
11068 tree stept = TREE_TYPE (decl);
11069 if (POINTER_TYPE_P (stept))
11070 stept = sizetype;
11071 step = fold_convert (stept, step);
11072 if (TREE_CODE (t) == MINUS_EXPR)
11073 step = fold_build1 (NEGATE_EXPR, stept, step);
11074 OMP_CLAUSE_LINEAR_STEP (c) = step;
11075 if (step != TREE_OPERAND (t, 1))
11077 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11078 &for_pre_body, NULL,
11079 is_gimple_val, fb_rvalue, false);
11080 ret = MIN (ret, tret);
11083 break;
11085 default:
11086 gcc_unreachable ();
11089 if (c2)
11091 gcc_assert (c);
11092 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11095 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11097 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11098 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11099 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11100 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11101 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11102 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11103 && OMP_CLAUSE_DECL (c) == decl)
11105 if (is_doacross && (collapse == 1 || i >= collapse))
11106 t = var;
11107 else
11109 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11110 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11111 gcc_assert (TREE_OPERAND (t, 0) == var);
11112 t = TREE_OPERAND (t, 1);
11113 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11114 || TREE_CODE (t) == MINUS_EXPR
11115 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11116 gcc_assert (TREE_OPERAND (t, 0) == var);
11117 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11118 is_doacross ? var : decl,
11119 TREE_OPERAND (t, 1));
11121 gimple_seq *seq;
11122 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11123 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11124 else
11125 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11126 gimplify_assign (decl, t, seq);
11131 BITMAP_FREE (has_decl_expr);
11133 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11135 push_gimplify_context ();
11136 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11138 OMP_FOR_BODY (orig_for_stmt)
11139 = build3 (BIND_EXPR, void_type_node, NULL,
11140 OMP_FOR_BODY (orig_for_stmt), NULL);
11141 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11145 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11146 &for_body);
11148 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11150 if (gimple_code (g) == GIMPLE_BIND)
11151 pop_gimplify_context (g);
11152 else
11153 pop_gimplify_context (NULL);
11156 if (orig_for_stmt != for_stmt)
11157 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11159 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11160 decl = TREE_OPERAND (t, 0);
11161 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11162 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11163 gimplify_omp_ctxp = ctx->outer_context;
11164 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11165 gimplify_omp_ctxp = ctx;
11166 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11167 TREE_OPERAND (t, 0) = var;
11168 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11169 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11170 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11173 gimplify_adjust_omp_clauses (pre_p, for_body,
11174 &OMP_FOR_CLAUSES (orig_for_stmt),
11175 TREE_CODE (orig_for_stmt));
11177 int kind;
11178 switch (TREE_CODE (orig_for_stmt))
11180 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11181 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11182 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11183 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11184 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11185 default:
11186 gcc_unreachable ();
11188 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11189 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11190 for_pre_body);
11191 if (orig_for_stmt != for_stmt)
11192 gimple_omp_for_set_combined_p (gfor, true);
11193 if (gimplify_omp_ctxp
11194 && (gimplify_omp_ctxp->combined_loop
11195 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11196 && gimplify_omp_ctxp->outer_context
11197 && gimplify_omp_ctxp->outer_context->combined_loop)))
11199 gimple_omp_for_set_combined_into_p (gfor, true);
11200 if (gimplify_omp_ctxp->combined_loop)
11201 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11202 else
11203 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11206 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11208 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11209 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11210 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11211 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11212 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11213 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11214 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11215 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11218 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11219 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11220 The outer taskloop stands for computing the number of iterations,
11221 counts for collapsed loops and holding taskloop specific clauses.
11222 The task construct stands for the effect of data sharing on the
11223 explicit task it creates and the inner taskloop stands for expansion
11224 of the static loop inside of the explicit task construct. */
11225 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11227 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11228 tree task_clauses = NULL_TREE;
11229 tree c = *gfor_clauses_ptr;
11230 tree *gtask_clauses_ptr = &task_clauses;
11231 tree outer_for_clauses = NULL_TREE;
11232 tree *gforo_clauses_ptr = &outer_for_clauses;
11233 for (; c; c = OMP_CLAUSE_CHAIN (c))
11234 switch (OMP_CLAUSE_CODE (c))
11236 /* These clauses are allowed on task, move them there. */
11237 case OMP_CLAUSE_SHARED:
11238 case OMP_CLAUSE_FIRSTPRIVATE:
11239 case OMP_CLAUSE_DEFAULT:
11240 case OMP_CLAUSE_IF:
11241 case OMP_CLAUSE_UNTIED:
11242 case OMP_CLAUSE_FINAL:
11243 case OMP_CLAUSE_MERGEABLE:
11244 case OMP_CLAUSE_PRIORITY:
11245 case OMP_CLAUSE_REDUCTION:
11246 case OMP_CLAUSE_IN_REDUCTION:
11247 *gtask_clauses_ptr = c;
11248 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11249 break;
11250 case OMP_CLAUSE_PRIVATE:
11251 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11253 /* We want private on outer for and firstprivate
11254 on task. */
11255 *gtask_clauses_ptr
11256 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11257 OMP_CLAUSE_FIRSTPRIVATE);
11258 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11259 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11260 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11261 *gforo_clauses_ptr = c;
11262 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11264 else
11266 *gtask_clauses_ptr = c;
11267 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11269 break;
11270 /* These clauses go into outer taskloop clauses. */
11271 case OMP_CLAUSE_GRAINSIZE:
11272 case OMP_CLAUSE_NUM_TASKS:
11273 case OMP_CLAUSE_NOGROUP:
11274 *gforo_clauses_ptr = c;
11275 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11276 break;
11277 /* Taskloop clause we duplicate on both taskloops. */
11278 case OMP_CLAUSE_COLLAPSE:
11279 *gfor_clauses_ptr = c;
11280 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11281 *gforo_clauses_ptr = copy_node (c);
11282 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11283 break;
11284 /* For lastprivate, keep the clause on inner taskloop, and add
11285 a shared clause on task. If the same decl is also firstprivate,
11286 add also firstprivate clause on the inner taskloop. */
11287 case OMP_CLAUSE_LASTPRIVATE:
11288 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
11290 /* For taskloop C++ lastprivate IVs, we want:
11291 1) private on outer taskloop
11292 2) firstprivate and shared on task
11293 3) lastprivate on inner taskloop */
11294 *gtask_clauses_ptr
11295 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11296 OMP_CLAUSE_FIRSTPRIVATE);
11297 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11298 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11299 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11300 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11301 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11302 OMP_CLAUSE_PRIVATE);
11303 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11304 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11305 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11306 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11308 *gfor_clauses_ptr = c;
11309 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11310 *gtask_clauses_ptr
11311 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11312 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11313 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11314 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11315 gtask_clauses_ptr
11316 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11317 break;
11318 default:
11319 gcc_unreachable ();
11321 *gfor_clauses_ptr = NULL_TREE;
11322 *gtask_clauses_ptr = NULL_TREE;
11323 *gforo_clauses_ptr = NULL_TREE;
11324 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11325 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11326 NULL_TREE, NULL_TREE, NULL_TREE);
11327 gimple_omp_task_set_taskloop_p (g, true);
11328 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11329 gomp_for *gforo
11330 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11331 gimple_omp_for_collapse (gfor),
11332 gimple_omp_for_pre_body (gfor));
11333 gimple_omp_for_set_pre_body (gfor, NULL);
11334 gimple_omp_for_set_combined_p (gforo, true);
11335 gimple_omp_for_set_combined_into_p (gfor, true);
11336 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11338 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11339 tree v = create_tmp_var (type);
11340 gimple_omp_for_set_index (gforo, i, v);
11341 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11342 gimple_omp_for_set_initial (gforo, i, t);
11343 gimple_omp_for_set_cond (gforo, i,
11344 gimple_omp_for_cond (gfor, i));
11345 t = unshare_expr (gimple_omp_for_final (gfor, i));
11346 gimple_omp_for_set_final (gforo, i, t);
11347 t = unshare_expr (gimple_omp_for_incr (gfor, i));
11348 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11349 TREE_OPERAND (t, 0) = v;
11350 gimple_omp_for_set_incr (gforo, i, t);
11351 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11352 OMP_CLAUSE_DECL (t) = v;
11353 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11354 gimple_omp_for_set_clauses (gforo, t);
11356 gimplify_seq_add_stmt (pre_p, gforo);
11358 else
11359 gimplify_seq_add_stmt (pre_p, gfor);
11360 if (ret != GS_ALL_DONE)
11361 return GS_ERROR;
11362 *expr_p = NULL_TREE;
11363 return GS_ALL_DONE;
11366 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
11367 of OMP_TARGET's body. */
11369 static tree
11370 find_omp_teams (tree *tp, int *walk_subtrees, void *)
11372 *walk_subtrees = 0;
11373 switch (TREE_CODE (*tp))
11375 case OMP_TEAMS:
11376 return *tp;
11377 case BIND_EXPR:
11378 case STATEMENT_LIST:
11379 *walk_subtrees = 1;
11380 break;
11381 default:
11382 break;
11384 return NULL_TREE;
11387 /* Helper function of optimize_target_teams, determine if the expression
11388 can be computed safely before the target construct on the host. */
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Bare types carry no runtime value; nothing to reject here.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host-side value cannot be trusted or copied
	 cheaply: errors, non-integral types, value-exprs, TLS,
	 side-effecting or volatile accesses.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* Globals marked for device mapping may have a different value on
	 the device, so the host value is not usable.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A local VAR_DECL not yet seen in any BIND_EXPR is declared
	 somewhere inside the target body itself — not computable before
	 entering the construct.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not yet classified: OK only if the implicit data-sharing
	     default for scalars is firstprivate.  */
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      /* map(always, to:) / map(always, tofrom:) guarantee the host value
	 is transferred, so it is safe to evaluate on the host.  */
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only a bare TARGET_EXPR slot (no initializer) can be looked
	 through; recurse on the slot decl itself.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
11486 /* Try to determine if the num_teams and/or thread_limit expressions
11487 can have their values determined already before entering the
11488 target construct.
11489 INTEGER_CSTs trivially are,
11490 integral decls that are firstprivate (explicitly or implicitly)
11491 or explicitly map(always, to:) or map(always, tofrom:) on the target
11492 region too, and expressions involving simple arithmetics on those
11493 too, function calls are not ok, dereferencing something neither etc.
11494 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11495 EXPR based on what we find:
11496 0 stands for clause not specified at all, use implementation default
11497 -1 stands for value that can't be determined easily before entering
11498 the target construct.
11499 If teams construct is not present at all, use 1 for num_teams
11500 and 0 for thread_limit (only one team is involved, and the thread
11501 limit is implementation defined).  */
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 = clause not specified; replaced below as analysis dictates.  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  /* Save the target's context; gimplification of clause operands below
     temporarily switches to the outer context.  */
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct: exactly one team.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of num_teams/thread_limit this clause
	   determines; other clause kinds are irrelevant here.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	/* Constants are trivially computable on the host.  */
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	/* A non-NULL result from the walk means some subexpression is
	   not safely computable before the target construct.  */
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the enclosing (host) context, not
	   inside the target region.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* Replace the teams clause operand with the gimplified value,
	   except for decls/TARGET_EXPRs which are left untouched.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Record the results as clauses on the OMP_TARGET itself.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
11566 /* Gimplify the gross structure of several OMP constructs. */
11568 static void
11569 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
/* Gimplify the gross structure of an OMP/OACC construct in *EXPR_P
   (sections, single, target, target data, teams, and the OpenACC
   data/kernels/parallel/host_data constructs), emitting the resulting
   GIMPLE statement into PRE_P and clearing *EXPR_P.  */
11571 tree expr = *expr_p;
11572 gimple *stmt;
11573 gimple_seq body = NULL;
11574 enum omp_region_type ort;
/* Translate the construct's tree code into an OMP region type used for
   clause scanning.  */
11576 switch (TREE_CODE (expr))
11578 case OMP_SECTIONS:
11579 case OMP_SINGLE:
11580 ort = ORT_WORKSHARE;
11581 break;
11582 case OMP_TARGET:
11583 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
11584 break;
11585 case OACC_KERNELS:
11586 ort = ORT_ACC_KERNELS;
11587 break;
11588 case OACC_PARALLEL:
11589 ort = ORT_ACC_PARALLEL;
11590 break;
11591 case OACC_DATA:
11592 ort = ORT_ACC_DATA;
11593 break;
11594 case OMP_TARGET_DATA:
11595 ort = ORT_TARGET_DATA;
11596 break;
11597 case OMP_TEAMS:
11598 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
/* Mark teams that will run on the host: either there is no enclosing
   OMP context at all, or the immediately enclosing context is a
   lone target region inside an "omp declare target" function.  */
11599 if (gimplify_omp_ctxp == NULL
11600 || (gimplify_omp_ctxp->region_type == ORT_TARGET
11601 && gimplify_omp_ctxp->outer_context == NULL
11602 && lookup_attribute ("omp declare target",
11603 DECL_ATTRIBUTES (current_function_decl))))
11604 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
11605 break;
11606 case OACC_HOST_DATA:
11607 ort = ORT_ACC_HOST_DATA;
11608 break;
11609 default:
11610 gcc_unreachable ();
11612 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
11613 TREE_CODE (expr));
/* For target regions, try to precompute num_teams/thread_limit.  */
11614 if (TREE_CODE (expr) == OMP_TARGET)
11615 optimize_target_teams (expr, pre_p);
/* Target, target data and host-teams regions gimplify their body in a
   fresh gimplification context.  */
11616 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
11617 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11619 push_gimplify_context ();
11620 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
11621 if (gimple_code (g) == GIMPLE_BIND)
11622 pop_gimplify_context (g);
11623 else
11624 pop_gimplify_context (NULL);
/* Data regions wrap the body in a try/finally whose cleanup calls the
   matching *_DATA_END builtin so the mapping is undone on any exit.  */
11625 if ((ort & ORT_TARGET_DATA) != 0)
11627 enum built_in_function end_ix;
11628 switch (TREE_CODE (expr))
11630 case OACC_DATA:
11631 case OACC_HOST_DATA:
11632 end_ix = BUILT_IN_GOACC_DATA_END;
11633 break;
11634 case OMP_TARGET_DATA:
11635 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
11636 break;
11637 default:
11638 gcc_unreachable ();
11640 tree fn = builtin_decl_explicit (end_ix);
11641 g = gimple_build_call (fn, 0);
11642 gimple_seq cleanup = NULL;
11643 gimple_seq_add_stmt (&cleanup, g);
11644 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11645 body = NULL;
11646 gimple_seq_add_stmt (&body, g);
11649 else
11650 gimplify_and_add (OMP_BODY (expr), &body);
11651 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
11652 TREE_CODE (expr));
/* Build the GIMPLE statement corresponding to the construct.  */
11654 switch (TREE_CODE (expr))
11656 case OACC_DATA:
11657 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
11658 OMP_CLAUSES (expr));
11659 break;
11660 case OACC_KERNELS:
11661 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
11662 OMP_CLAUSES (expr));
11663 break;
11664 case OACC_HOST_DATA:
11665 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
11666 OMP_CLAUSES (expr));
11667 break;
11668 case OACC_PARALLEL:
11669 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
11670 OMP_CLAUSES (expr));
11671 break;
11672 case OMP_SECTIONS:
11673 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
11674 break;
11675 case OMP_SINGLE:
11676 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
11677 break;
11678 case OMP_TARGET:
11679 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
11680 OMP_CLAUSES (expr));
11681 break;
11682 case OMP_TARGET_DATA:
11683 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
11684 OMP_CLAUSES (expr));
11685 break;
11686 case OMP_TEAMS:
11687 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
11688 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11689 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
11690 break;
11691 default:
11692 gcc_unreachable ();
11695 gimplify_seq_add_stmt (pre_p, stmt);
11696 *expr_p = NULL_TREE;
11699 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
11700 target update constructs. */
11702 static void
11703 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
/* Gimplify a standalone data-movement directive (OpenACC enter/exit
   data, OpenACC update, OpenMP target update / enter data / exit data):
   scan and adjust its clauses, rewrite map kinds for the OpenACC
   if_present and finalize modifiers, and emit a body-less
   GIMPLE_OMP_TARGET into PRE_P.  */
11705 tree expr = *expr_p;
11706 int kind;
11707 gomp_target *stmt;
11708 enum omp_region_type ort = ORT_WORKSHARE;
/* Map the directive's tree code to a GF_OMP_TARGET_KIND_* value; the
   OpenACC directives additionally use the ORT_ACC region type for
   clause scanning.  */
11710 switch (TREE_CODE (expr))
11712 case OACC_ENTER_DATA:
11713 case OACC_EXIT_DATA:
11714 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
11715 ort = ORT_ACC;
11716 break;
11717 case OACC_UPDATE:
11718 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
11719 ort = ORT_ACC;
11720 break;
11721 case OMP_TARGET_UPDATE:
11722 kind = GF_OMP_TARGET_KIND_UPDATE;
11723 break;
11724 case OMP_TARGET_ENTER_DATA:
11725 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
11726 break;
11727 case OMP_TARGET_EXIT_DATA:
11728 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
11729 break;
11730 default:
11731 gcc_unreachable ();
11733 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
11734 ort, TREE_CODE (expr));
11735 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
11736 TREE_CODE (expr));
11737 if (TREE_CODE (expr) == OACC_UPDATE
11738 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11739 OMP_CLAUSE_IF_PRESENT))
11741 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
11742 clause. */
11743 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11744 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11745 switch (OMP_CLAUSE_MAP_KIND (c))
11747 case GOMP_MAP_FORCE_TO:
11748 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
11749 break;
11750 case GOMP_MAP_FORCE_FROM:
11751 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
11752 break;
11753 default:
11754 break;
11757 else if (TREE_CODE (expr) == OACC_EXIT_DATA
11758 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11759 OMP_CLAUSE_FINALIZE))
11761 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
11762 semantics apply to all mappings of this OpenACC directive. */
11763 bool finalize_marked = false;
11764 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11765 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11766 switch (OMP_CLAUSE_MAP_KIND (c))
11768 case GOMP_MAP_FROM:
11769 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
11770 finalize_marked = true;
11771 break;
11772 case GOMP_MAP_RELEASE:
11773 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
11774 finalize_marked = true;
11775 break;
11776 default:
11777 /* Check consistency: libgomp relies on the very first data
11778 mapping clause being marked, so make sure we did that before
11779 any other mapping clauses. */
11780 gcc_assert (finalize_marked);
11781 break;
/* Standalone directive: the GIMPLE_OMP_TARGET has no body.  */
11784 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
11786 gimplify_seq_add_stmt (pre_p, stmt);
11787 *expr_p = NULL_TREE;
11790 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
11791 stabilized the lhs of the atomic operation as *ADDR. Return true if
11792 EXPR is this stabilized form. */
11794 static bool
11795 goa_lhs_expr_p (tree expr, tree addr)
11797 /* Also include casts to other type variants. The C front end is fond
11798 of adding these for e.g. volatile variables. This is like
11799 STRIP_TYPE_NOPS but includes the main variant lookup. */
11800 STRIP_USELESS_TYPE_CONVERSION (expr);
11802 if (TREE_CODE (expr) == INDIRECT_REF)
11804 expr = TREE_OPERAND (expr, 0);
11805 while (expr != addr
11806 && (CONVERT_EXPR_P (expr)
11807 || TREE_CODE (expr) == NON_LVALUE_EXPR)
11808 && TREE_CODE (expr) == TREE_CODE (addr)
11809 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
11811 expr = TREE_OPERAND (expr, 0);
11812 addr = TREE_OPERAND (addr, 0);
11814 if (expr == addr)
11815 return true;
11816 return (TREE_CODE (addr) == ADDR_EXPR
11817 && TREE_CODE (expr) == ADDR_EXPR
11818 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
11820 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
11821 return true;
11822 return false;
11825 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
11826 expression does not involve the lhs, evaluate it into a temporary.
11827 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
11828 or -1 if an error was encountered. */
11830 static int
11831 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
11832 tree lhs_var)
11834 tree expr = *expr_p;
11835 int saw_lhs;
/* The whole expression is the stabilized lhs itself: substitute the
   temporary that will hold the atomically-loaded value.  */
11837 if (goa_lhs_expr_p (expr, lhs_addr))
11839 *expr_p = lhs_var;
11840 return 1;
/* A GIMPLE value needs no further work and cannot contain the lhs.  */
11842 if (is_gimple_val (expr))
11843 return 0;
/* Recurse into the operands of the expression forms that may contain
   the lhs, OR-ing together whether any operand saw it.  */
11845 saw_lhs = 0;
11846 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
11848 case tcc_binary:
11849 case tcc_comparison:
11850 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
11851 lhs_var);
11852 /* FALLTHRU */
11853 case tcc_unary:
11854 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
11855 lhs_var);
11856 break;
11857 case tcc_expression:
11858 switch (TREE_CODE (expr))
11860 case TRUTH_ANDIF_EXPR:
11861 case TRUTH_ORIF_EXPR:
11862 case TRUTH_AND_EXPR:
11863 case TRUTH_OR_EXPR:
11864 case TRUTH_XOR_EXPR:
11865 case BIT_INSERT_EXPR:
11866 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
11867 lhs_addr, lhs_var);
11868 /* FALLTHRU */
11869 case TRUTH_NOT_EXPR:
11870 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11871 lhs_addr, lhs_var);
11872 break;
11873 case COMPOUND_EXPR:
11874 /* Break out any preevaluations from cp_build_modify_expr. */
11875 for (; TREE_CODE (expr) == COMPOUND_EXPR;
11876 expr = TREE_OPERAND (expr, 1))
11877 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
11878 *expr_p = expr;
11879 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
11880 default:
11881 break;
11883 break;
11884 case tcc_reference:
11885 if (TREE_CODE (expr) == BIT_FIELD_REF)
11886 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11887 lhs_addr, lhs_var);
11888 break;
11889 default:
11890 break;
/* An expression not involving the lhs is evaluated once, up front,
   into a temporary (per the comment above the function).  A failed
   gimplification is reported as -1.  */
11893 if (saw_lhs == 0)
11895 enum gimplify_status gs;
11896 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
11897 if (gs != GS_ALL_DONE)
11898 saw_lhs = -1;
11901 return saw_lhs;
11904 /* Gimplify an OMP_ATOMIC statement. */
11906 static enum gimplify_status
11907 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
11909 tree addr = TREE_OPERAND (*expr_p, 0);
11910 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
11911 ? NULL : TREE_OPERAND (*expr_p, 1);
11912 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
11913 tree tmp_load;
11914 gomp_atomic_load *loadstmt;
11915 gomp_atomic_store *storestmt;
11917 tmp_load = create_tmp_reg (type);
11918 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
11919 return GS_ERROR;
11921 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
11922 != GS_ALL_DONE)
11923 return GS_ERROR;
11925 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
11926 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
11927 gimplify_seq_add_stmt (pre_p, loadstmt);
11928 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
11929 != GS_ALL_DONE)
11930 return GS_ERROR;
11932 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
11933 rhs = tmp_load;
11934 storestmt
11935 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
11936 gimplify_seq_add_stmt (pre_p, storestmt);
11937 switch (TREE_CODE (*expr_p))
11939 case OMP_ATOMIC_READ:
11940 case OMP_ATOMIC_CAPTURE_OLD:
11941 *expr_p = tmp_load;
11942 gimple_omp_atomic_set_need_value (loadstmt);
11943 break;
11944 case OMP_ATOMIC_CAPTURE_NEW:
11945 *expr_p = rhs;
11946 gimple_omp_atomic_set_need_value (storestmt);
11947 break;
11948 default:
11949 *expr_p = NULL;
11950 break;
11953 return GS_ALL_DONE;
11956 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
11957 body, and adding some EH bits. */
11959 static enum gimplify_status
11960 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
11962 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
11963 gimple *body_stmt;
11964 gtransaction *trans_stmt;
11965 gimple_seq body = NULL;
11966 int subcode = 0;
11968 /* Wrap the transaction body in a BIND_EXPR so we have a context
11969 where to put decls for OMP. */
11970 if (TREE_CODE (tbody) != BIND_EXPR)
11972 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
11973 TREE_SIDE_EFFECTS (bind) = 1;
11974 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
11975 TRANSACTION_EXPR_BODY (expr) = bind;
11978 push_gimplify_context ();
11979 temp = voidify_wrapper_expr (*expr_p, NULL);
11981 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
11982 pop_gimplify_context (body_stmt);
11984 trans_stmt = gimple_build_transaction (body);
11985 if (TRANSACTION_EXPR_OUTER (expr))
11986 subcode = GTMA_IS_OUTER;
11987 else if (TRANSACTION_EXPR_RELAXED (expr))
11988 subcode = GTMA_IS_RELAXED;
11989 gimple_transaction_set_subcode (trans_stmt, subcode);
11991 gimplify_seq_add_stmt (pre_p, trans_stmt);
11993 if (temp)
11995 *expr_p = temp;
11996 return GS_OK;
11999 *expr_p = NULL_TREE;
12000 return GS_ALL_DONE;
12003 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12004 is the OMP_BODY of the original EXPR (which has already been
12005 gimplified so it's not present in the EXPR).
12007 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12009 static gimple *
12010 gimplify_omp_ordered (tree expr, gimple_seq body)
/* Validate the depend(sink:)/depend(source) clauses of the ordered
   construct EXPR against the loop iteration variables recorded in
   gimplify_omp_ctxp, diagnose all inconsistencies, and return either
   the GIMPLE_OMP_ORDERED statement or a nop on failure.  */
12012 tree c, decls;
12013 int failures = 0;
12014 unsigned int i;
12015 tree source_c = NULL_TREE;
12016 tree sink_c = NULL_TREE;
12018 if (gimplify_omp_ctxp)
12020 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
/* depend(sink)/depend(source) require an enclosing loop with an
   ordered(n) clause, i.e. recorded iteration variables.  */
12021 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12022 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
12023 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
12024 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
12026 error_at (OMP_CLAUSE_LOCATION (c),
12027 "%<ordered%> construct with %<depend%> clause must be "
12028 "closely nested inside a loop with %<ordered%> clause "
12029 "with a parameter");
12030 failures++;
12032 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12033 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
12035 bool fail = false;
/* loop_iter_var holds pairs per loop level: element 2*i is the
   original iteration variable, 2*i+1 its replacement.  Each decl in
   the sink clause must match the original at its level and is then
   substituted with the replacement.  */
12036 for (decls = OMP_CLAUSE_DECL (c), i = 0;
12037 decls && TREE_CODE (decls) == TREE_LIST;
12038 decls = TREE_CHAIN (decls), ++i)
12039 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
12040 continue;
12041 else if (TREE_VALUE (decls)
12042 != gimplify_omp_ctxp->loop_iter_var[2 * i])
12044 error_at (OMP_CLAUSE_LOCATION (c),
12045 "variable %qE is not an iteration "
12046 "of outermost loop %d, expected %qE",
12047 TREE_VALUE (decls), i + 1,
12048 gimplify_omp_ctxp->loop_iter_var[2 * i]);
12049 fail = true;
12050 failures++;
12052 else
12053 TREE_VALUE (decls)
12054 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
/* The sink clause must name exactly one variable per loop level.  */
12055 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
12057 error_at (OMP_CLAUSE_LOCATION (c),
12058 "number of variables in %<depend(sink)%> "
12059 "clause does not match number of "
12060 "iteration variables");
12061 failures++;
12063 sink_c = c;
12065 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12066 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
/* At most one depend(source) clause is allowed.  */
12068 if (source_c)
12070 error_at (OMP_CLAUSE_LOCATION (c),
12071 "more than one %<depend(source)%> clause on an "
12072 "%<ordered%> construct");
12073 failures++;
12075 else
12076 source_c = c;
/* source and sink clauses are mutually exclusive on one construct.  */
12079 if (source_c && sink_c)
12081 error_at (OMP_CLAUSE_LOCATION (source_c),
12082 "%<depend(source)%> clause specified together with "
12083 "%<depend(sink:)%> clauses on the same construct");
12084 failures++;
/* On any diagnosed failure emit a nop instead of the construct.  */
12087 if (failures)
12088 return gimple_build_nop ();
12089 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
12092 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12093 expression produces a value to be used as an operand inside a GIMPLE
12094 statement, the value will be stored back in *EXPR_P. This value will
12095 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12096 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12097 emitted in PRE_P and POST_P.
12099 Additionally, this process may overwrite parts of the input
12100 expression during gimplification. Ideally, it should be
12101 possible to do non-destructive gimplification.
12103 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12104 the expression needs to evaluate to a value to be used as
12105 an operand in a GIMPLE statement, this value will be stored in
12106 *EXPR_P on exit. This happens when the caller specifies one
12107 of fb_lvalue or fb_rvalue fallback flags.
12109 PRE_P will contain the sequence of GIMPLE statements corresponding
12110 to the evaluation of EXPR and all the side-effects that must
12111 be executed before the main expression. On exit, the last
12112 statement of PRE_P is the core statement being gimplified. For
12113 instance, when gimplifying 'if (++a)' the last statement in
12114 PRE_P will be 'if (t.1)' where t.1 is the result of
12115 pre-incrementing 'a'.
12117 POST_P will contain the sequence of GIMPLE statements corresponding
12118 to the evaluation of all the side-effects that must be executed
12119 after the main expression. If this is NULL, the post
12120 side-effects are stored at the end of PRE_P.
12122 The reason why the output is split in two is to handle post
12123 side-effects explicitly. In some cases, an expression may have
12124 inner and outer post side-effects which need to be emitted in
12125 an order different from the one given by the recursive
12126 traversal. For instance, for the expression (*p--)++ the post
12127 side-effects of '--' must actually occur *after* the post
12128 side-effects of '++'. However, gimplification will first visit
12129 the inner expression, so if a separate POST sequence was not
12130 used, the resulting sequence would be:
12132 1 t.1 = *p
12133 2 p = p - 1
12134 3 t.2 = t.1 + 1
12135 4 *p = t.2
12137 However, the post-decrement operation in line #2 must not be
12138 evaluated until after the store to *p at line #4, so the
12139 correct sequence should be:
12141 1 t.1 = *p
12142 2 t.2 = t.1 + 1
12143 3 *p = t.2
12144 4 p = p - 1
12146 So, by specifying a separate post queue, it is possible
12147 to emit the post side-effects in the correct order.
12148 If POST_P is NULL, an internal queue will be used. Before
12149 returning to the caller, the sequence POST_P is appended to
12150 the main output sequence PRE_P.
12152 GIMPLE_TEST_F points to a function that takes a tree T and
12153 returns nonzero if T is in the GIMPLE form requested by the
12154 caller. The GIMPLE predicates are in gimple.c.
12156 FALLBACK tells the function what sort of a temporary we want if
12157 gimplification cannot produce an expression that complies with
12158 GIMPLE_TEST_F.
12160 fb_none means that no temporary should be generated
12161 fb_rvalue means that an rvalue is OK to generate
12162 fb_lvalue means that an lvalue is OK to generate
12163 fb_either means that either is OK, but an lvalue is preferable.
12164 fb_mayfail means that gimplification may fail (in which case
12165 GS_ERROR will be returned)
12167 The return value is either GS_ERROR or GS_ALL_DONE, since this
12168 function iterates until EXPR is completely gimplified or an error
12169 occurs. */
12171 enum gimplify_status
12172 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12173 bool (*gimple_test_f) (tree), fallback_t fallback)
12175 tree tmp;
12176 gimple_seq internal_pre = NULL;
12177 gimple_seq internal_post = NULL;
12178 tree save_expr;
12179 bool is_statement;
12180 location_t saved_location;
12181 enum gimplify_status ret;
12182 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
12183 tree label;
12185 save_expr = *expr_p;
12186 if (save_expr == NULL_TREE)
12187 return GS_ALL_DONE;
12189 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12190 is_statement = gimple_test_f == is_gimple_stmt;
12191 if (is_statement)
12192 gcc_assert (pre_p);
12194 /* Consistency checks. */
12195 if (gimple_test_f == is_gimple_reg)
12196 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12197 else if (gimple_test_f == is_gimple_val
12198 || gimple_test_f == is_gimple_call_addr
12199 || gimple_test_f == is_gimple_condexpr
12200 || gimple_test_f == is_gimple_mem_rhs
12201 || gimple_test_f == is_gimple_mem_rhs_or_call
12202 || gimple_test_f == is_gimple_reg_rhs
12203 || gimple_test_f == is_gimple_reg_rhs_or_call
12204 || gimple_test_f == is_gimple_asm_val
12205 || gimple_test_f == is_gimple_mem_ref_addr)
12206 gcc_assert (fallback & fb_rvalue);
12207 else if (gimple_test_f == is_gimple_min_lval
12208 || gimple_test_f == is_gimple_lvalue)
12209 gcc_assert (fallback & fb_lvalue);
12210 else if (gimple_test_f == is_gimple_addressable)
12211 gcc_assert (fallback & fb_either);
12212 else if (gimple_test_f == is_gimple_stmt)
12213 gcc_assert (fallback == fb_none);
12214 else
12216 /* We should have recognized the GIMPLE_TEST_F predicate to
12217 know what kind of fallback to use in case a temporary is
12218 needed to hold the value or address of *EXPR_P. */
12219 gcc_unreachable ();
12222 /* We used to check the predicate here and return immediately if it
12223 succeeds. This is wrong; the design is for gimplification to be
12224 idempotent, and for the predicates to only test for valid forms, not
12225 whether they are fully simplified. */
12226 if (pre_p == NULL)
12227 pre_p = &internal_pre;
12229 if (post_p == NULL)
12230 post_p = &internal_post;
12232 /* Remember the last statements added to PRE_P and POST_P. Every
12233 new statement added by the gimplification helpers needs to be
12234 annotated with location information. To centralize the
12235 responsibility, we remember the last statement that had been
12236 added to both queues before gimplifying *EXPR_P. If
12237 gimplification produces new statements in PRE_P and POST_P, those
12238 statements will be annotated with the same location information
12239 as *EXPR_P. */
12240 pre_last_gsi = gsi_last (*pre_p);
12241 post_last_gsi = gsi_last (*post_p);
12243 saved_location = input_location;
12244 if (save_expr != error_mark_node
12245 && EXPR_HAS_LOCATION (*expr_p))
12246 input_location = EXPR_LOCATION (*expr_p);
12248 /* Loop over the specific gimplifiers until the toplevel node
12249 remains the same. */
12252 /* Strip away as many useless type conversions as possible
12253 at the toplevel. */
12254 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
12256 /* Remember the expr. */
12257 save_expr = *expr_p;
12259 /* Die, die, die, my darling. */
12260 if (error_operand_p (save_expr))
12262 ret = GS_ERROR;
12263 break;
12266 /* Do any language-specific gimplification. */
12267 ret = ((enum gimplify_status)
12268 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
12269 if (ret == GS_OK)
12271 if (*expr_p == NULL_TREE)
12272 break;
12273 if (*expr_p != save_expr)
12274 continue;
12276 else if (ret != GS_UNHANDLED)
12277 break;
12279 /* Make sure that all the cases set 'ret' appropriately. */
12280 ret = GS_UNHANDLED;
12281 switch (TREE_CODE (*expr_p))
12283 /* First deal with the special cases. */
12285 case POSTINCREMENT_EXPR:
12286 case POSTDECREMENT_EXPR:
12287 case PREINCREMENT_EXPR:
12288 case PREDECREMENT_EXPR:
12289 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
12290 fallback != fb_none,
12291 TREE_TYPE (*expr_p));
12292 break;
12294 case VIEW_CONVERT_EXPR:
12295 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
12296 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
12298 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12299 post_p, is_gimple_val, fb_rvalue);
12300 recalculate_side_effects (*expr_p);
12301 break;
12303 /* Fallthru. */
12305 case ARRAY_REF:
12306 case ARRAY_RANGE_REF:
12307 case REALPART_EXPR:
12308 case IMAGPART_EXPR:
12309 case COMPONENT_REF:
12310 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
12311 fallback ? fallback : fb_rvalue);
12312 break;
12314 case COND_EXPR:
12315 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
12317 /* C99 code may assign to an array in a structure value of a
12318 conditional expression, and this has undefined behavior
12319 only on execution, so create a temporary if an lvalue is
12320 required. */
12321 if (fallback == fb_lvalue)
12323 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12324 mark_addressable (*expr_p);
12325 ret = GS_OK;
12327 break;
12329 case CALL_EXPR:
12330 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
12332 /* C99 code may assign to an array in a structure returned
12333 from a function, and this has undefined behavior only on
12334 execution, so create a temporary if an lvalue is
12335 required. */
12336 if (fallback == fb_lvalue)
12338 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12339 mark_addressable (*expr_p);
12340 ret = GS_OK;
12342 break;
12344 case TREE_LIST:
12345 gcc_unreachable ();
12347 case COMPOUND_EXPR:
12348 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
12349 break;
12351 case COMPOUND_LITERAL_EXPR:
12352 ret = gimplify_compound_literal_expr (expr_p, pre_p,
12353 gimple_test_f, fallback);
12354 break;
12356 case MODIFY_EXPR:
12357 case INIT_EXPR:
12358 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
12359 fallback != fb_none);
12360 break;
12362 case TRUTH_ANDIF_EXPR:
12363 case TRUTH_ORIF_EXPR:
12365 /* Preserve the original type of the expression and the
12366 source location of the outer expression. */
12367 tree org_type = TREE_TYPE (*expr_p);
12368 *expr_p = gimple_boolify (*expr_p);
12369 *expr_p = build3_loc (input_location, COND_EXPR,
12370 org_type, *expr_p,
12371 fold_convert_loc
12372 (input_location,
12373 org_type, boolean_true_node),
12374 fold_convert_loc
12375 (input_location,
12376 org_type, boolean_false_node));
12377 ret = GS_OK;
12378 break;
12381 case TRUTH_NOT_EXPR:
12383 tree type = TREE_TYPE (*expr_p);
12384 /* The parsers are careful to generate TRUTH_NOT_EXPR
12385 only with operands that are always zero or one.
12386 We do not fold here but handle the only interesting case
12387 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
12388 *expr_p = gimple_boolify (*expr_p);
12389 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
12390 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
12391 TREE_TYPE (*expr_p),
12392 TREE_OPERAND (*expr_p, 0));
12393 else
12394 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
12395 TREE_TYPE (*expr_p),
12396 TREE_OPERAND (*expr_p, 0),
12397 build_int_cst (TREE_TYPE (*expr_p), 1));
12398 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
12399 *expr_p = fold_convert_loc (input_location, type, *expr_p);
12400 ret = GS_OK;
12401 break;
12404 case ADDR_EXPR:
12405 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
12406 break;
12408 case ANNOTATE_EXPR:
12410 tree cond = TREE_OPERAND (*expr_p, 0);
12411 tree kind = TREE_OPERAND (*expr_p, 1);
12412 tree data = TREE_OPERAND (*expr_p, 2);
12413 tree type = TREE_TYPE (cond);
12414 if (!INTEGRAL_TYPE_P (type))
12416 *expr_p = cond;
12417 ret = GS_OK;
12418 break;
12420 tree tmp = create_tmp_var (type);
12421 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
12422 gcall *call
12423 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
12424 gimple_call_set_lhs (call, tmp);
12425 gimplify_seq_add_stmt (pre_p, call);
12426 *expr_p = tmp;
12427 ret = GS_ALL_DONE;
12428 break;
12431 case VA_ARG_EXPR:
12432 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
12433 break;
12435 CASE_CONVERT:
12436 if (IS_EMPTY_STMT (*expr_p))
12438 ret = GS_ALL_DONE;
12439 break;
12442 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
12443 || fallback == fb_none)
12445 /* Just strip a conversion to void (or in void context) and
12446 try again. */
12447 *expr_p = TREE_OPERAND (*expr_p, 0);
12448 ret = GS_OK;
12449 break;
12452 ret = gimplify_conversion (expr_p);
12453 if (ret == GS_ERROR)
12454 break;
12455 if (*expr_p != save_expr)
12456 break;
12457 /* FALLTHRU */
12459 case FIX_TRUNC_EXPR:
12460 /* unary_expr: ... | '(' cast ')' val | ... */
12461 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12462 is_gimple_val, fb_rvalue);
12463 recalculate_side_effects (*expr_p);
12464 break;
12466 case INDIRECT_REF:
12468 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
12469 bool notrap = TREE_THIS_NOTRAP (*expr_p);
12470 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12472 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12473 if (*expr_p != save_expr)
12475 ret = GS_OK;
12476 break;
12479 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12480 is_gimple_reg, fb_rvalue);
12481 if (ret == GS_ERROR)
12482 break;
12484 recalculate_side_effects (*expr_p);
12485 *expr_p = fold_build2_loc (input_location, MEM_REF,
12486 TREE_TYPE (*expr_p),
12487 TREE_OPERAND (*expr_p, 0),
12488 build_int_cst (saved_ptr_type, 0));
12489 TREE_THIS_VOLATILE (*expr_p) = volatilep;
12490 TREE_THIS_NOTRAP (*expr_p) = notrap;
12491 ret = GS_OK;
12492 break;
12495 /* We arrive here through the various re-gimplifcation paths. */
12496 case MEM_REF:
12497 /* First try re-folding the whole thing. */
12498 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12499 TREE_OPERAND (*expr_p, 0),
12500 TREE_OPERAND (*expr_p, 1));
12501 if (tmp)
12503 REF_REVERSE_STORAGE_ORDER (tmp)
12504 = REF_REVERSE_STORAGE_ORDER (*expr_p);
12505 *expr_p = tmp;
12506 recalculate_side_effects (*expr_p);
12507 ret = GS_OK;
12508 break;
12510 /* Avoid re-gimplifying the address operand if it is already
12511 in suitable form. Re-gimplifying would mark the address
12512 operand addressable. Always gimplify when not in SSA form
12513 as we still may have to gimplify decls with value-exprs. */
12514 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
12515 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12517 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12518 is_gimple_mem_ref_addr, fb_rvalue);
12519 if (ret == GS_ERROR)
12520 break;
12522 recalculate_side_effects (*expr_p);
12523 ret = GS_ALL_DONE;
12524 break;
12526 /* Constants need not be gimplified. */
12527 case INTEGER_CST:
12528 case REAL_CST:
12529 case FIXED_CST:
12530 case STRING_CST:
12531 case COMPLEX_CST:
12532 case VECTOR_CST:
12533 /* Drop the overflow flag on constants, we do not want
12534 that in the GIMPLE IL. */
12535 if (TREE_OVERFLOW_P (*expr_p))
12536 *expr_p = drop_tree_overflow (*expr_p);
12537 ret = GS_ALL_DONE;
12538 break;
12540 case CONST_DECL:
12541 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12542 CONST_DECL node. Otherwise the decl is replaceable by its
12543 value. */
12544 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12545 if (fallback & fb_lvalue)
12546 ret = GS_ALL_DONE;
12547 else
12549 *expr_p = DECL_INITIAL (*expr_p);
12550 ret = GS_OK;
12552 break;
12554 case DECL_EXPR:
12555 ret = gimplify_decl_expr (expr_p, pre_p);
12556 break;
12558 case BIND_EXPR:
12559 ret = gimplify_bind_expr (expr_p, pre_p);
12560 break;
12562 case LOOP_EXPR:
12563 ret = gimplify_loop_expr (expr_p, pre_p);
12564 break;
12566 case SWITCH_EXPR:
12567 ret = gimplify_switch_expr (expr_p, pre_p);
12568 break;
12570 case EXIT_EXPR:
12571 ret = gimplify_exit_expr (expr_p);
12572 break;
12574 case GOTO_EXPR:
12575 /* If the target is not LABEL, then it is a computed jump
12576 and the target needs to be gimplified. */
12577 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
12579 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12580 NULL, is_gimple_val, fb_rvalue);
12581 if (ret == GS_ERROR)
12582 break;
12584 gimplify_seq_add_stmt (pre_p,
12585 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
12586 ret = GS_ALL_DONE;
12587 break;
12589 case PREDICT_EXPR:
12590 gimplify_seq_add_stmt (pre_p,
12591 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12592 PREDICT_EXPR_OUTCOME (*expr_p)));
12593 ret = GS_ALL_DONE;
12594 break;
12596 case LABEL_EXPR:
12597 ret = gimplify_label_expr (expr_p, pre_p);
12598 label = LABEL_EXPR_LABEL (*expr_p);
12599 gcc_assert (decl_function_context (label) == current_function_decl);
12601 /* If the label is used in a goto statement, or address of the label
12602 is taken, we need to unpoison all variables that were seen so far.
12603 Doing so would prevent us from reporting a false positives. */
12604 if (asan_poisoned_variables
12605 && asan_used_labels != NULL
12606 && asan_used_labels->contains (label))
12607 asan_poison_variables (asan_poisoned_variables, false, pre_p);
12608 break;
12610 case CASE_LABEL_EXPR:
12611 ret = gimplify_case_label_expr (expr_p, pre_p);
12613 if (gimplify_ctxp->live_switch_vars)
12614 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12615 pre_p);
12616 break;
12618 case RETURN_EXPR:
12619 ret = gimplify_return_expr (*expr_p, pre_p);
12620 break;
12622 case CONSTRUCTOR:
12623 /* Don't reduce this in place; let gimplify_init_constructor work its
12624 magic. Buf if we're just elaborating this for side effects, just
12625 gimplify any element that has side-effects. */
12626 if (fallback == fb_none)
12628 unsigned HOST_WIDE_INT ix;
12629 tree val;
12630 tree temp = NULL_TREE;
12631 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12632 if (TREE_SIDE_EFFECTS (val))
12633 append_to_statement_list (val, &temp);
12635 *expr_p = temp;
12636 ret = temp ? GS_OK : GS_ALL_DONE;
12638 /* C99 code may assign to an array in a constructed
12639 structure or union, and this has undefined behavior only
12640 on execution, so create a temporary if an lvalue is
12641 required. */
12642 else if (fallback == fb_lvalue)
12644 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12645 mark_addressable (*expr_p);
12646 ret = GS_OK;
12648 else
12649 ret = GS_ALL_DONE;
12650 break;
12652 /* The following are special cases that are not handled by the
12653 original GIMPLE grammar. */
12655 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
12656 eliminated. */
12657 case SAVE_EXPR:
12658 ret = gimplify_save_expr (expr_p, pre_p, post_p);
12659 break;
12661 case BIT_FIELD_REF:
12662 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12663 post_p, is_gimple_lvalue, fb_either);
12664 recalculate_side_effects (*expr_p);
12665 break;
12667 case TARGET_MEM_REF:
12669 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
12671 if (TMR_BASE (*expr_p))
12672 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
12673 post_p, is_gimple_mem_ref_addr, fb_either);
12674 if (TMR_INDEX (*expr_p))
12675 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
12676 post_p, is_gimple_val, fb_rvalue);
12677 if (TMR_INDEX2 (*expr_p))
12678 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
12679 post_p, is_gimple_val, fb_rvalue);
12680 /* TMR_STEP and TMR_OFFSET are always integer constants. */
12681 ret = MIN (r0, r1);
12683 break;
12685 case NON_LVALUE_EXPR:
12686 /* This should have been stripped above. */
12687 gcc_unreachable ();
12689 case ASM_EXPR:
12690 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
12691 break;
12693 case TRY_FINALLY_EXPR:
12694 case TRY_CATCH_EXPR:
12696 gimple_seq eval, cleanup;
12697 gtry *try_;
12699 /* Calls to destructors are generated automatically in FINALLY/CATCH
12700 block. They should have location as UNKNOWN_LOCATION. However,
12701 gimplify_call_expr will reset these call stmts to input_location
12702 if it finds stmt's location is unknown. To prevent resetting for
12703 destructors, we set the input_location to unknown.
12704 Note that this only affects the destructor calls in FINALLY/CATCH
12705 block, and will automatically reset to its original value by the
12706 end of gimplify_expr. */
12707 input_location = UNKNOWN_LOCATION;
12708 eval = cleanup = NULL;
12709 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
12710 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
12711 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
12712 if (gimple_seq_empty_p (cleanup))
12714 gimple_seq_add_seq (pre_p, eval);
12715 ret = GS_ALL_DONE;
12716 break;
12718 try_ = gimple_build_try (eval, cleanup,
12719 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
12720 ? GIMPLE_TRY_FINALLY
12721 : GIMPLE_TRY_CATCH);
12722 if (EXPR_HAS_LOCATION (save_expr))
12723 gimple_set_location (try_, EXPR_LOCATION (save_expr));
12724 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
12725 gimple_set_location (try_, saved_location);
12726 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
12727 gimple_try_set_catch_is_cleanup (try_,
12728 TRY_CATCH_IS_CLEANUP (*expr_p));
12729 gimplify_seq_add_stmt (pre_p, try_);
12730 ret = GS_ALL_DONE;
12731 break;
12734 case CLEANUP_POINT_EXPR:
12735 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
12736 break;
12738 case TARGET_EXPR:
12739 ret = gimplify_target_expr (expr_p, pre_p, post_p);
12740 break;
12742 case CATCH_EXPR:
12744 gimple *c;
12745 gimple_seq handler = NULL;
12746 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
12747 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
12748 gimplify_seq_add_stmt (pre_p, c);
12749 ret = GS_ALL_DONE;
12750 break;
12753 case EH_FILTER_EXPR:
12755 gimple *ehf;
12756 gimple_seq failure = NULL;
12758 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
12759 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
12760 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
12761 gimplify_seq_add_stmt (pre_p, ehf);
12762 ret = GS_ALL_DONE;
12763 break;
12766 case OBJ_TYPE_REF:
12768 enum gimplify_status r0, r1;
12769 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
12770 post_p, is_gimple_val, fb_rvalue);
12771 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
12772 post_p, is_gimple_val, fb_rvalue);
12773 TREE_SIDE_EFFECTS (*expr_p) = 0;
12774 ret = MIN (r0, r1);
12776 break;
12778 case LABEL_DECL:
12779 /* We get here when taking the address of a label. We mark
12780 the label as "forced"; meaning it can never be removed and
12781 it is a potential target for any computed goto. */
12782 FORCED_LABEL (*expr_p) = 1;
12783 ret = GS_ALL_DONE;
12784 break;
12786 case STATEMENT_LIST:
12787 ret = gimplify_statement_list (expr_p, pre_p);
12788 break;
12790 case WITH_SIZE_EXPR:
12792 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12793 post_p == &internal_post ? NULL : post_p,
12794 gimple_test_f, fallback);
12795 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12796 is_gimple_val, fb_rvalue);
12797 ret = GS_ALL_DONE;
12799 break;
12801 case VAR_DECL:
12802 case PARM_DECL:
12803 ret = gimplify_var_or_parm_decl (expr_p);
12804 break;
12806 case RESULT_DECL:
12807 /* When within an OMP context, notice uses of variables. */
12808 if (gimplify_omp_ctxp)
12809 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12810 ret = GS_ALL_DONE;
12811 break;
12813 case DEBUG_EXPR_DECL:
12814 gcc_unreachable ();
12816 case DEBUG_BEGIN_STMT:
12817 gimplify_seq_add_stmt (pre_p,
12818 gimple_build_debug_begin_stmt
12819 (TREE_BLOCK (*expr_p),
12820 EXPR_LOCATION (*expr_p)));
12821 ret = GS_ALL_DONE;
12822 *expr_p = NULL;
12823 break;
12825 case SSA_NAME:
12826 /* Allow callbacks into the gimplifier during optimization. */
12827 ret = GS_ALL_DONE;
12828 break;
12830 case OMP_PARALLEL:
12831 gimplify_omp_parallel (expr_p, pre_p);
12832 ret = GS_ALL_DONE;
12833 break;
12835 case OMP_TASK:
12836 gimplify_omp_task (expr_p, pre_p);
12837 ret = GS_ALL_DONE;
12838 break;
12840 case OMP_FOR:
12841 case OMP_SIMD:
12842 case OMP_DISTRIBUTE:
12843 case OMP_TASKLOOP:
12844 case OACC_LOOP:
12845 ret = gimplify_omp_for (expr_p, pre_p);
12846 break;
12848 case OACC_CACHE:
12849 gimplify_oacc_cache (expr_p, pre_p);
12850 ret = GS_ALL_DONE;
12851 break;
12853 case OACC_DECLARE:
12854 gimplify_oacc_declare (expr_p, pre_p);
12855 ret = GS_ALL_DONE;
12856 break;
12858 case OACC_HOST_DATA:
12859 case OACC_DATA:
12860 case OACC_KERNELS:
12861 case OACC_PARALLEL:
12862 case OMP_SECTIONS:
12863 case OMP_SINGLE:
12864 case OMP_TARGET:
12865 case OMP_TARGET_DATA:
12866 case OMP_TEAMS:
12867 gimplify_omp_workshare (expr_p, pre_p);
12868 ret = GS_ALL_DONE;
12869 break;
12871 case OACC_ENTER_DATA:
12872 case OACC_EXIT_DATA:
12873 case OACC_UPDATE:
12874 case OMP_TARGET_UPDATE:
12875 case OMP_TARGET_ENTER_DATA:
12876 case OMP_TARGET_EXIT_DATA:
12877 gimplify_omp_target_update (expr_p, pre_p);
12878 ret = GS_ALL_DONE;
12879 break;
12881 case OMP_SECTION:
12882 case OMP_MASTER:
12883 case OMP_ORDERED:
12884 case OMP_CRITICAL:
12886 gimple_seq body = NULL;
12887 gimple *g;
12889 gimplify_and_add (OMP_BODY (*expr_p), &body);
12890 switch (TREE_CODE (*expr_p))
12892 case OMP_SECTION:
12893 g = gimple_build_omp_section (body);
12894 break;
12895 case OMP_MASTER:
12896 g = gimple_build_omp_master (body);
12897 break;
12898 case OMP_ORDERED:
12899 g = gimplify_omp_ordered (*expr_p, body);
12900 break;
12901 case OMP_CRITICAL:
12902 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
12903 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
12904 gimplify_adjust_omp_clauses (pre_p, body,
12905 &OMP_CRITICAL_CLAUSES (*expr_p),
12906 OMP_CRITICAL);
12907 g = gimple_build_omp_critical (body,
12908 OMP_CRITICAL_NAME (*expr_p),
12909 OMP_CRITICAL_CLAUSES (*expr_p));
12910 break;
12911 default:
12912 gcc_unreachable ();
12914 gimplify_seq_add_stmt (pre_p, g);
12915 ret = GS_ALL_DONE;
12916 break;
12919 case OMP_TASKGROUP:
12921 gimple_seq body = NULL;
12923 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
12924 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
12925 OMP_TASKGROUP);
12926 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
12927 gimplify_and_add (OMP_BODY (*expr_p), &body);
12928 gimple_seq cleanup = NULL;
12929 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
12930 gimple *g = gimple_build_call (fn, 0);
12931 gimple_seq_add_stmt (&cleanup, g);
12932 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12933 body = NULL;
12934 gimple_seq_add_stmt (&body, g);
12935 g = gimple_build_omp_taskgroup (body, *pclauses);
12936 gimplify_seq_add_stmt (pre_p, g);
12937 ret = GS_ALL_DONE;
12938 break;
12941 case OMP_ATOMIC:
12942 case OMP_ATOMIC_READ:
12943 case OMP_ATOMIC_CAPTURE_OLD:
12944 case OMP_ATOMIC_CAPTURE_NEW:
12945 ret = gimplify_omp_atomic (expr_p, pre_p);
12946 break;
12948 case TRANSACTION_EXPR:
12949 ret = gimplify_transaction (expr_p, pre_p);
12950 break;
12952 case TRUTH_AND_EXPR:
12953 case TRUTH_OR_EXPR:
12954 case TRUTH_XOR_EXPR:
12956 tree orig_type = TREE_TYPE (*expr_p);
12957 tree new_type, xop0, xop1;
12958 *expr_p = gimple_boolify (*expr_p);
12959 new_type = TREE_TYPE (*expr_p);
12960 if (!useless_type_conversion_p (orig_type, new_type))
12962 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
12963 ret = GS_OK;
12964 break;
12967 /* Boolified binary truth expressions are semantically equivalent
12968 to bitwise binary expressions. Canonicalize them to the
12969 bitwise variant. */
12970 switch (TREE_CODE (*expr_p))
12972 case TRUTH_AND_EXPR:
12973 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
12974 break;
12975 case TRUTH_OR_EXPR:
12976 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
12977 break;
12978 case TRUTH_XOR_EXPR:
12979 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
12980 break;
12981 default:
12982 break;
12984 /* Now make sure that operands have compatible type to
12985 expression's new_type. */
12986 xop0 = TREE_OPERAND (*expr_p, 0);
12987 xop1 = TREE_OPERAND (*expr_p, 1);
12988 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
12989 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
12990 new_type,
12991 xop0);
12992 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
12993 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
12994 new_type,
12995 xop1);
12996 /* Continue classified as tcc_binary. */
12997 goto expr_2;
13000 case VEC_COND_EXPR:
13002 enum gimplify_status r0, r1, r2;
13004 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13005 post_p, is_gimple_condexpr, fb_rvalue);
13006 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13007 post_p, is_gimple_val, fb_rvalue);
13008 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13009 post_p, is_gimple_val, fb_rvalue);
13011 ret = MIN (MIN (r0, r1), r2);
13012 recalculate_side_effects (*expr_p);
13014 break;
13016 case VEC_PERM_EXPR:
13017 /* Classified as tcc_expression. */
13018 goto expr_3;
13020 case BIT_INSERT_EXPR:
13021 /* Argument 3 is a constant. */
13022 goto expr_2;
13024 case POINTER_PLUS_EXPR:
13026 enum gimplify_status r0, r1;
13027 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13028 post_p, is_gimple_val, fb_rvalue);
13029 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13030 post_p, is_gimple_val, fb_rvalue);
13031 recalculate_side_effects (*expr_p);
13032 ret = MIN (r0, r1);
13033 break;
13036 default:
13037 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
13039 case tcc_comparison:
13040 /* Handle comparison of objects of non scalar mode aggregates
13041 with a call to memcmp. It would be nice to only have to do
13042 this for variable-sized objects, but then we'd have to allow
13043 the same nest of reference nodes we allow for MODIFY_EXPR and
13044 that's too complex.
13046 Compare scalar mode aggregates as scalar mode values. Using
13047 memcmp for them would be very inefficient at best, and is
13048 plain wrong if bitfields are involved. */
13050 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
13052 /* Vector comparisons need no boolification. */
13053 if (TREE_CODE (type) == VECTOR_TYPE)
13054 goto expr_2;
13055 else if (!AGGREGATE_TYPE_P (type))
13057 tree org_type = TREE_TYPE (*expr_p);
13058 *expr_p = gimple_boolify (*expr_p);
13059 if (!useless_type_conversion_p (org_type,
13060 TREE_TYPE (*expr_p)))
13062 *expr_p = fold_convert_loc (input_location,
13063 org_type, *expr_p);
13064 ret = GS_OK;
13066 else
13067 goto expr_2;
13069 else if (TYPE_MODE (type) != BLKmode)
13070 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13071 else
13072 ret = gimplify_variable_sized_compare (expr_p);
13074 break;
13077 /* If *EXPR_P does not need to be special-cased, handle it
13078 according to its class. */
13079 case tcc_unary:
13080 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13081 post_p, is_gimple_val, fb_rvalue);
13082 break;
13084 case tcc_binary:
13085 expr_2:
13087 enum gimplify_status r0, r1;
13089 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13090 post_p, is_gimple_val, fb_rvalue);
13091 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13092 post_p, is_gimple_val, fb_rvalue);
13094 ret = MIN (r0, r1);
13095 break;
13098 expr_3:
13100 enum gimplify_status r0, r1, r2;
13102 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13103 post_p, is_gimple_val, fb_rvalue);
13104 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13105 post_p, is_gimple_val, fb_rvalue);
13106 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13107 post_p, is_gimple_val, fb_rvalue);
13109 ret = MIN (MIN (r0, r1), r2);
13110 break;
13113 case tcc_declaration:
13114 case tcc_constant:
13115 ret = GS_ALL_DONE;
13116 goto dont_recalculate;
13118 default:
13119 gcc_unreachable ();
13122 recalculate_side_effects (*expr_p);
13124 dont_recalculate:
13125 break;
13128 gcc_assert (*expr_p || ret != GS_OK);
13130 while (ret == GS_OK);
13132 /* If we encountered an error_mark somewhere nested inside, either
13133 stub out the statement or propagate the error back out. */
13134 if (ret == GS_ERROR)
13136 if (is_statement)
13137 *expr_p = NULL;
13138 goto out;
13141 /* This was only valid as a return value from the langhook, which
13142 we handled. Make sure it doesn't escape from any other context. */
13143 gcc_assert (ret != GS_UNHANDLED);
13145 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
13147 /* We aren't looking for a value, and we don't have a valid
13148 statement. If it doesn't have side-effects, throw it away.
13149 We can also get here with code such as "*&&L;", where L is
13150 a LABEL_DECL that is marked as FORCED_LABEL. */
13151 if (TREE_CODE (*expr_p) == LABEL_DECL
13152 || !TREE_SIDE_EFFECTS (*expr_p))
13153 *expr_p = NULL;
13154 else if (!TREE_THIS_VOLATILE (*expr_p))
13156 /* This is probably a _REF that contains something nested that
13157 has side effects. Recurse through the operands to find it. */
13158 enum tree_code code = TREE_CODE (*expr_p);
13160 switch (code)
13162 case COMPONENT_REF:
13163 case REALPART_EXPR:
13164 case IMAGPART_EXPR:
13165 case VIEW_CONVERT_EXPR:
13166 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13167 gimple_test_f, fallback);
13168 break;
13170 case ARRAY_REF:
13171 case ARRAY_RANGE_REF:
13172 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13173 gimple_test_f, fallback);
13174 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13175 gimple_test_f, fallback);
13176 break;
13178 default:
13179 /* Anything else with side-effects must be converted to
13180 a valid statement before we get here. */
13181 gcc_unreachable ();
13184 *expr_p = NULL;
13186 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13187 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
13189 /* Historically, the compiler has treated a bare reference
13190 to a non-BLKmode volatile lvalue as forcing a load. */
13191 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
13193 /* Normally, we do not want to create a temporary for a
13194 TREE_ADDRESSABLE type because such a type should not be
13195 copied by bitwise-assignment. However, we make an
13196 exception here, as all we are doing here is ensuring that
13197 we read the bytes that make up the type. We use
13198 create_tmp_var_raw because create_tmp_var will abort when
13199 given a TREE_ADDRESSABLE type. */
13200 tree tmp = create_tmp_var_raw (type, "vol");
13201 gimple_add_tmp_var (tmp);
13202 gimplify_assign (tmp, *expr_p, pre_p);
13203 *expr_p = NULL;
13205 else
13206 /* We can't do anything useful with a volatile reference to
13207 an incomplete type, so just throw it away. Likewise for
13208 a BLKmode type, since any implicit inner load should
13209 already have been turned into an explicit one by the
13210 gimplification process. */
13211 *expr_p = NULL;
13214 /* If we are gimplifying at the statement level, we're done. Tack
13215 everything together and return. */
13216 if (fallback == fb_none || is_statement)
13218 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13219 it out for GC to reclaim it. */
13220 *expr_p = NULL_TREE;
13222 if (!gimple_seq_empty_p (internal_pre)
13223 || !gimple_seq_empty_p (internal_post))
13225 gimplify_seq_add_seq (&internal_pre, internal_post);
13226 gimplify_seq_add_seq (pre_p, internal_pre);
13229 /* The result of gimplifying *EXPR_P is going to be the last few
13230 statements in *PRE_P and *POST_P. Add location information
13231 to all the statements that were added by the gimplification
13232 helpers. */
13233 if (!gimple_seq_empty_p (*pre_p))
13234 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
13236 if (!gimple_seq_empty_p (*post_p))
13237 annotate_all_with_location_after (*post_p, post_last_gsi,
13238 input_location);
13240 goto out;
13243 #ifdef ENABLE_GIMPLE_CHECKING
13244 if (*expr_p)
13246 enum tree_code code = TREE_CODE (*expr_p);
13247 /* These expressions should already be in gimple IR form. */
13248 gcc_assert (code != MODIFY_EXPR
13249 && code != ASM_EXPR
13250 && code != BIND_EXPR
13251 && code != CATCH_EXPR
13252 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
13253 && code != EH_FILTER_EXPR
13254 && code != GOTO_EXPR
13255 && code != LABEL_EXPR
13256 && code != LOOP_EXPR
13257 && code != SWITCH_EXPR
13258 && code != TRY_FINALLY_EXPR
13259 && code != OACC_PARALLEL
13260 && code != OACC_KERNELS
13261 && code != OACC_DATA
13262 && code != OACC_HOST_DATA
13263 && code != OACC_DECLARE
13264 && code != OACC_UPDATE
13265 && code != OACC_ENTER_DATA
13266 && code != OACC_EXIT_DATA
13267 && code != OACC_CACHE
13268 && code != OMP_CRITICAL
13269 && code != OMP_FOR
13270 && code != OACC_LOOP
13271 && code != OMP_MASTER
13272 && code != OMP_TASKGROUP
13273 && code != OMP_ORDERED
13274 && code != OMP_PARALLEL
13275 && code != OMP_SECTIONS
13276 && code != OMP_SECTION
13277 && code != OMP_SINGLE);
13279 #endif
13281 /* Otherwise we're gimplifying a subexpression, so the resulting
13282 value is interesting. If it's a valid operand that matches
13283 GIMPLE_TEST_F, we're done. Unless we are handling some
13284 post-effects internally; if that's the case, we need to copy into
13285 a temporary before adding the post-effects to POST_P. */
13286 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
13287 goto out;
13289 /* Otherwise, we need to create a new temporary for the gimplified
13290 expression. */
13292 /* We can't return an lvalue if we have an internal postqueue. The
13293 object the lvalue refers to would (probably) be modified by the
13294 postqueue; we need to copy the value out first, which means an
13295 rvalue. */
13296 if ((fallback & fb_lvalue)
13297 && gimple_seq_empty_p (internal_post)
13298 && is_gimple_addressable (*expr_p))
13300 /* An lvalue will do. Take the address of the expression, store it
13301 in a temporary, and replace the expression with an INDIRECT_REF of
13302 that temporary. */
13303 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
13304 unsigned int ref_align = get_object_alignment (*expr_p);
13305 tree ref_type = TREE_TYPE (*expr_p);
13306 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
13307 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
13308 if (TYPE_ALIGN (ref_type) != ref_align)
13309 ref_type = build_aligned_type (ref_type, ref_align);
13310 *expr_p = build2 (MEM_REF, ref_type,
13311 tmp, build_zero_cst (ref_alias_type));
13313 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
13315 /* An rvalue will do. Assign the gimplified expression into a
13316 new temporary TMP and replace the original expression with
13317 TMP. First, make sure that the expression has a type so that
13318 it can be assigned into a temporary. */
13319 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
13320 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
13322 else
13324 #ifdef ENABLE_GIMPLE_CHECKING
13325 if (!(fallback & fb_mayfail))
13327 fprintf (stderr, "gimplification failed:\n");
13328 print_generic_expr (stderr, *expr_p);
13329 debug_tree (*expr_p);
13330 internal_error ("gimplification failed");
13332 #endif
13333 gcc_assert (fallback & fb_mayfail);
13335 /* If this is an asm statement, and the user asked for the
13336 impossible, don't die. Fail and let gimplify_asm_expr
13337 issue an error. */
13338 ret = GS_ERROR;
13339 goto out;
13342 /* Make sure the temporary matches our predicate. */
13343 gcc_assert ((*gimple_test_f) (*expr_p));
13345 if (!gimple_seq_empty_p (internal_post))
13347 annotate_all_with_location (internal_post, input_location);
13348 gimplify_seq_add_seq (pre_p, internal_post);
13351 out:
13352 input_location = saved_location;
13353 return ret;
13356 /* Like gimplify_expr but make sure the gimplified result is not itself
13357 a SSA name (but a decl if it were). Temporaries required by
13358 evaluating *EXPR_P may be still SSA names. */
13360 static enum gimplify_status
13361 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13362 bool (*gimple_test_f) (tree), fallback_t fallback,
13363 bool allow_ssa)
13365 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13366 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13367 gimple_test_f, fallback);
13368 if (! allow_ssa
13369 && TREE_CODE (*expr_p) == SSA_NAME)
13371 tree name = *expr_p;
13372 if (was_ssa_name_p)
13373 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13374 else
13376 /* Avoid the extra copy if possible. */
13377 *expr_p = create_tmp_reg (TREE_TYPE (name));
13378 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13379 release_ssa_name (name);
13382 return ret;
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark before recursing, so self-referential types terminate.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: only the min/max bounds can be variable.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to every other variant.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Clearing DECL_IGNORED_P keeps the artificial bound variables
	     visible to the debugger.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the offset, size and type of every field.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Common to all type codes: gimplify the overall size/size-unit ...  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* ... and share the results with every variant, marking each as done.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
13488 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13489 a size or position, has had all of its SAVE_EXPRs evaluated.
13490 We add any required statements to *STMT_P. */
13492 void
13493 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
13495 tree expr = *expr_p;
13497 /* We don't do anything if the value isn't there, is constant, or contains
13498 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
13499 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
13500 will want to replace it with a new variable, but that will cause problems
13501 if this type is from outside the function. It's OK to have that here. */
13502 if (expr == NULL_TREE
13503 || is_gimple_constant (expr)
13504 || TREE_CODE (expr) == VAR_DECL
13505 || CONTAINS_PLACEHOLDER_P (expr))
13506 return;
13508 *expr_p = unshare_expr (expr);
13510 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13511 if the def vanishes. */
13512 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
13514 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
13515 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
13516 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
13517 if (is_gimple_constant (*expr_p))
13518 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* Functions marked "omp declare target" are gimplified inside a
	 target OMP context.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to anchor the bind.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; clear it so GC can reclaim it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Wrap the body in TRY_FINALLY so the parameter cleanups run
	     on every exit path.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Tear down any OMP context opened above (or by the body).  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
13629 typedef char *char_p; /* For DEF_VEC_P. */
13631 /* Return whether we should exclude FNDECL from instrumentation. */
13633 static bool
13634 flag_instrument_functions_exclude_p (tree fndecl)
13636 vec<char_p> *v;
13638 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
13639 if (v && v->length () > 0)
13641 const char *name;
13642 int i;
13643 char *s;
13645 name = lang_hooks.decl_printable_name (fndecl, 0);
13646 FOR_EACH_VEC_ELT (*v, i, s)
13647 if (strstr (name, s) != NULL)
13648 return true;
13651 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
13652 if (v && v->length () > 0)
13654 const char *name;
13655 int i;
13656 char *s;
13658 name = DECL_SOURCE_FILE (fndecl);
13659 FOR_EACH_VEC_ELT (*v, i, s)
13660 if (strstr (name, s) != NULL)
13661 return true;
13664 return false;
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not already have a GIMPLE body.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the result, if it is complex or vector valued.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Track variables ASAN should poison/unpoison on scope boundaries; live
     only for the duration of gimplify_body.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
           && DECL_EXTERNAL (fndecl)
           && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;

      /* The instrumentation hooks aren't going to call the instrumented
         function and the address they receive is expected to be matchable
         against symbol addresses.  Make sure we don't create a trampoline,
         in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      /* Build the exit-hook call:
         __cyg_profile_func_exit (this_fn_addr, __builtin_return_address (0)),
         to run as the cleanup of a try/finally.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Likewise the entry-hook call, which precedes the wrapped body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* Under -fsanitize=thread, wrap the body so __tsan_func_exit runs on
     every exit path via a try/finally.  */
  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
13799 /* Return a dummy expression of type TYPE in order to keep going after an
13800 error. */
13802 static tree
13803 dummy_object (tree type)
13805 tree t = build_int_cst (build_pointer_type (type), 0);
13806 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   EXPR_P points to the VA_ARG_EXPR; it is replaced in place either by a
   call to the internal function IFN_VA_ARG (the normal case, expanded
   later) or, on a promotion diagnostic, by a trap followed by a dummy
   object.  PRE_P receives any statements gimplified along the way;
   POST_P is unused.  Returns GS_OK, GS_ALL_DONE, or GS_ERROR.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
                      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "pass X not Y" help text at most once per compilation.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
         in a system header) through `...'.  */
      location_t xloc
        = expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Call abort to encourage the user to fix the program.  */
      if (warned)
        inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* TAG carries the requested type and APTAG the va_list type as null
     pointer constants, so IFN_VA_ARG expansion can recover both.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
                                          valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
13890 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
13892 DST/SRC are the destination and source respectively. You can pass
13893 ungimplified trees in DST or SRC, in which case they will be
13894 converted to a gimple operand if necessary.
13896 This function returns the newly created GIMPLE_ASSIGN tuple. */
13898 gimple *
13899 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
13901 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
13902 gimplify_and_add (t, seq_p);
13903 ggc_free (t);
13904 return gimple_seq_last_stmt (*seq_p);
13907 inline hashval_t
13908 gimplify_hasher::hash (const elt_t *p)
13910 tree t = p->val;
13911 return iterative_hash_expr (t, 0);
13914 inline bool
13915 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
13917 tree t1 = p1->val;
13918 tree t2 = p2->val;
13919 enum tree_code code = TREE_CODE (t1);
13921 if (TREE_CODE (t2) != code
13922 || TREE_TYPE (t1) != TREE_TYPE (t2))
13923 return false;
13925 if (!operand_equal_p (t1, t2, 0))
13926 return false;
13928 /* Only allow them to compare equal if they also hash equal; otherwise
13929 results are nondeterminate, and we fail bootstrap comparison. */
13930 gcc_checking_assert (hash (p1) == hash (p2));
13932 return true;