/* gcc/gimplify.c  */
/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-general.h"
55 #include "omp-low.h"
56 #include "gimple-low.h"
57 #include "cilk.h"
58 #include "gomp-constants.h"
59 #include "tree-dump.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
62 #include "builtins.h"
63 #include "asan.h"
64 #include "dbgcnt.h"
66 /* Hash set of poisoned variables in a bind expr. */
67 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Per-variable data-sharing flags tracked while gimplifying OMP regions.
   Low bits are mutually-informative classes; high bits are modifiers.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of the OMP/OpenACC region currently being gimplified.  Values are
   bit-encoded so combined constructs OR in their component bits.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,	 /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
141 /* Gimplify hashtable helper. */
143 struct gimplify_hasher : free_ptr_hash <elt_t>
145 static inline hashval_t hash (const elt_t *);
146 static inline bool equal (const elt_t *, const elt_t *);
149 struct gimplify_ctx
151 struct gimplify_ctx *prev_context;
153 vec<gbind *> bind_expr_stack;
154 tree temps;
155 gimple_seq conditional_cleanups;
156 tree exit_label;
157 tree return_temp;
159 vec<tree> case_labels;
160 hash_set<tree> *live_switch_vars;
161 /* The formal temporary table. Should this be persistent? */
162 hash_table<gimplify_hasher> *temp_htab;
164 int conditions;
165 unsigned into_ssa : 1;
166 unsigned allow_rhs_cond_expr : 1;
167 unsigned in_cleanup_point_expr : 1;
168 unsigned keep_stack : 1;
169 unsigned save_stack : 1;
170 unsigned in_switch_expr : 1;
173 struct gimplify_omp_ctx
175 struct gimplify_omp_ctx *outer_context;
176 splay_tree variables;
177 hash_set<tree> *privatized_types;
178 /* Iteration variables in an OMP_FOR. */
179 vec<tree> loop_iter_var;
180 location_t location;
181 enum omp_clause_default_kind default_kind;
182 enum omp_region_type region_type;
183 bool combined_loop;
184 bool distribute;
185 bool target_map_scalars_firstprivate;
186 bool target_map_pointers_as_0len_arrays;
187 bool target_firstprivatize_array_bases;
190 static struct gimplify_ctx *gimplify_ctxp;
191 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
193 /* Forward declaration. */
194 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
195 static hash_map<tree, tree> *oacc_declare_returns;
196 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
197 bool (*) (tree), fallback_t, bool);
199 /* Shorter alias name for the above function for use in gimplify.c
200 only. */
202 static inline void
203 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
205 gimple_seq_add_stmt_without_update (seq_p, gs);
208 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
209 NULL, a new sequence is allocated. This function is
210 similar to gimple_seq_add_seq, but does not scan the operands.
211 During gimplification, we need to manipulate statement sequences
212 before the def/use vectors have been constructed. */
214 static void
215 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
217 gimple_stmt_iterator si;
219 if (src == NULL)
220 return;
222 si = gsi_last (*dst_p);
223 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
227 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
228 and popping gimplify contexts. */
230 static struct gimplify_ctx *ctx_pool = NULL;
232 /* Return a gimplify context struct from the pool. */
234 static inline struct gimplify_ctx *
235 ctx_alloc (void)
237 struct gimplify_ctx * c = ctx_pool;
239 if (c)
240 ctx_pool = c->prev_context;
241 else
242 c = XNEW (struct gimplify_ctx);
244 memset (c, '\0', sizeof (*c));
245 return c;
248 /* Put gimplify context C back into the pool. */
250 static inline void
251 ctx_free (struct gimplify_ctx *c)
253 c->prev_context = ctx_pool;
254 ctx_pool = c;
257 /* Free allocated ctx stack memory. */
259 void
260 free_gimplify_stack (void)
262 struct gimplify_ctx *c;
264 while ((c = ctx_pool))
266 ctx_pool = c->prev_context;
267 free (c);
272 /* Set up a context for the gimplifier. */
274 void
275 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
277 struct gimplify_ctx *c = ctx_alloc ();
279 c->prev_context = gimplify_ctxp;
280 gimplify_ctxp = c;
281 gimplify_ctxp->into_ssa = in_ssa;
282 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
285 /* Tear down a context for the gimplifier. If BODY is non-null, then
286 put the temporaries into the outer BIND_EXPR. Otherwise, put them
287 in the local_decls.
289 BODY is not a sequence, but the first tuple in a sequence. */
291 void
292 pop_gimplify_context (gimple *body)
294 struct gimplify_ctx *c = gimplify_ctxp;
296 gcc_assert (c
297 && (!c->bind_expr_stack.exists ()
298 || c->bind_expr_stack.is_empty ()));
299 c->bind_expr_stack.release ();
300 gimplify_ctxp = c->prev_context;
302 if (body)
303 declare_vars (c->temps, body, false);
304 else
305 record_vars (c->temps);
307 delete c->temp_htab;
308 c->temp_htab = NULL;
309 ctx_free (c);
312 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
314 static void
315 gimple_push_bind_expr (gbind *bind_stmt)
317 gimplify_ctxp->bind_expr_stack.reserve (8);
318 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
321 /* Pop the first element off the stack of bindings. */
323 static void
324 gimple_pop_bind_expr (void)
326 gimplify_ctxp->bind_expr_stack.pop ();
329 /* Return the first element of the stack of bindings. */
331 gbind *
332 gimple_current_bind_expr (void)
334 return gimplify_ctxp->bind_expr_stack.last ();
337 /* Return the stack of bindings created during gimplification. */
339 vec<gbind *>
340 gimple_bind_expr_stack (void)
342 return gimplify_ctxp->bind_expr_stack;
345 /* Return true iff there is a COND_EXPR between us and the innermost
346 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
348 static bool
349 gimple_conditional_context (void)
351 return gimplify_ctxp->conditions > 0;
354 /* Note that we've entered a COND_EXPR. */
356 static void
357 gimple_push_condition (void)
359 #ifdef ENABLE_GIMPLE_CHECKING
360 if (gimplify_ctxp->conditions == 0)
361 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
362 #endif
363 ++(gimplify_ctxp->conditions);
366 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
367 now, add any conditional cleanups we've seen to the prequeue. */
369 static void
370 gimple_pop_condition (gimple_seq *pre_p)
372 int conds = --(gimplify_ctxp->conditions);
374 gcc_assert (conds >= 0);
375 if (conds == 0)
377 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
378 gimplify_ctxp->conditional_cleanups = NULL;
382 /* A stable comparison routine for use with splay trees and DECLs. */
384 static int
385 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
387 tree a = (tree) xa;
388 tree b = (tree) xb;
390 return DECL_UID (a) - DECL_UID (b);
393 /* Create a new omp construct that deals with variable remapping. */
395 static struct gimplify_omp_ctx *
396 new_omp_context (enum omp_region_type region_type)
398 struct gimplify_omp_ctx *c;
400 c = XCNEW (struct gimplify_omp_ctx);
401 c->outer_context = gimplify_omp_ctxp;
402 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
403 c->privatized_types = new hash_set<tree>;
404 c->location = input_location;
405 c->region_type = region_type;
406 if ((region_type & ORT_TASK) == 0)
407 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
408 else
409 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
411 return c;
414 /* Destroy an omp construct that deals with variable remapping. */
416 static void
417 delete_omp_context (struct gimplify_omp_ctx *c)
419 splay_tree_delete (c->variables);
420 delete c->privatized_types;
421 c->loop_iter_var.release ();
422 XDELETE (c);
425 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
426 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
428 /* Both gimplify the statement T and append it to *SEQ_P. This function
429 behaves exactly as gimplify_stmt, but you don't have to pass T as a
430 reference. */
432 void
433 gimplify_and_add (tree t, gimple_seq *seq_p)
435 gimplify_stmt (&t, seq_p);
438 /* Gimplify statement T into sequence *SEQ_P, and return the first
439 tuple in the sequence of generated tuples for this statement.
440 Return NULL if gimplifying T produced no tuples. */
442 static gimple *
443 gimplify_and_return_first (tree t, gimple_seq *seq_p)
445 gimple_stmt_iterator last = gsi_last (*seq_p);
447 gimplify_and_add (t, seq_p);
449 if (!gsi_end_p (last))
451 gsi_next (&last);
452 return gsi_stmt (last);
454 else
455 return gimple_seq_first_stmt (*seq_p);
458 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
459 LHS, or for a call argument. */
461 static bool
462 is_gimple_mem_rhs (tree t)
464 /* If we're dealing with a renamable type, either source or dest must be
465 a renamed variable. */
466 if (is_gimple_reg_type (TREE_TYPE (t)))
467 return is_gimple_val (t);
468 else
469 return is_gimple_val (t) || is_gimple_lvalue (t);
472 /* Return true if T is a CALL_EXPR or an expression that can be
473 assigned to a temporary. Note that this predicate should only be
474 used during gimplification. See the rationale for this in
475 gimplify_modify_expr. */
477 static bool
478 is_gimple_reg_rhs_or_call (tree t)
480 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
481 || TREE_CODE (t) == CALL_EXPR);
484 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
485 this predicate should only be used during gimplification. See the
486 rationale for this in gimplify_modify_expr. */
488 static bool
489 is_gimple_mem_rhs_or_call (tree t)
491 /* If we're dealing with a renamable type, either source or dest must be
492 a renamed variable. */
493 if (is_gimple_reg_type (TREE_TYPE (t)))
494 return is_gimple_val (t);
495 else
496 return (is_gimple_val (t)
497 || is_gimple_lvalue (t)
498 || TREE_CLOBBER_P (t)
499 || TREE_CODE (t) == CALL_EXPR);
502 /* Create a temporary with a name derived from VAL. Subroutine of
503 lookup_tmp_var; nobody else should call this function. */
505 static inline tree
506 create_tmp_from_val (tree val)
508 /* Drop all qualifiers and address-space information from the value type. */
509 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
510 tree var = create_tmp_var (type, get_name (val));
511 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
512 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
513 DECL_GIMPLE_REG_P (var) = 1;
514 return var;
517 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
518 an existing expression temporary. */
520 static tree
521 lookup_tmp_var (tree val, bool is_formal)
523 tree ret;
525 /* If not optimizing, never really reuse a temporary. local-alloc
526 won't allocate any variable that is used in more than one basic
527 block, which means it will go into memory, causing much extra
528 work in reload and final and poorer code generation, outweighing
529 the extra memory allocation here. */
530 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
531 ret = create_tmp_from_val (val);
532 else
534 elt_t elt, *elt_p;
535 elt_t **slot;
537 elt.val = val;
538 if (!gimplify_ctxp->temp_htab)
539 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
540 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
541 if (*slot == NULL)
543 elt_p = XNEW (elt_t);
544 elt_p->val = val;
545 elt_p->temp = ret = create_tmp_from_val (val);
546 *slot = elt_p;
548 else
550 elt_p = *slot;
551 ret = elt_p->temp;
555 return ret;
558 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
560 static tree
561 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
562 bool is_formal, bool allow_ssa)
564 tree t, mod;
566 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
567 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
568 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
569 fb_rvalue);
571 if (allow_ssa
572 && gimplify_ctxp->into_ssa
573 && is_gimple_reg_type (TREE_TYPE (val)))
575 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
576 if (! gimple_in_ssa_p (cfun))
578 const char *name = get_name (val);
579 if (name)
580 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
583 else
584 t = lookup_tmp_var (val, is_formal);
586 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
588 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
590 /* gimplify_modify_expr might want to reduce this further. */
591 gimplify_and_add (mod, pre_p);
592 ggc_free (mod);
594 return t;
597 /* Return a formal temporary variable initialized with VAL. PRE_P is as
598 in gimplify_expr. Only use this function if:
600 1) The value of the unfactored expression represented by VAL will not
601 change between the initialization and use of the temporary, and
602 2) The temporary will not be otherwise modified.
604 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
605 and #2 means it is inappropriate for && temps.
607 For other cases, use get_initialized_tmp_var instead. */
609 tree
610 get_formal_tmp_var (tree val, gimple_seq *pre_p)
612 return internal_get_tmp_var (val, pre_p, NULL, true, true);
615 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
616 are as in gimplify_expr. */
618 tree
619 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
620 bool allow_ssa)
622 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
625 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
626 generate debug info for them; otherwise don't. */
628 void
629 declare_vars (tree vars, gimple *gs, bool debug_info)
631 tree last = vars;
632 if (last)
634 tree temps, block;
636 gbind *scope = as_a <gbind *> (gs);
638 temps = nreverse (last);
640 block = gimple_bind_block (scope);
641 gcc_assert (!block || TREE_CODE (block) == BLOCK);
642 if (!block || !debug_info)
644 DECL_CHAIN (last) = gimple_bind_vars (scope);
645 gimple_bind_set_vars (scope, temps);
647 else
649 /* We need to attach the nodes both to the BIND_EXPR and to its
650 associated BLOCK for debugging purposes. The key point here
651 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
652 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
653 if (BLOCK_VARS (block))
654 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
655 else
657 gimple_bind_set_vars (scope,
658 chainon (gimple_bind_vars (scope), temps));
659 BLOCK_VARS (block) = temps;
665 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
666 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
667 no such upper bound can be obtained. */
669 static void
670 force_constant_size (tree var)
672 /* The only attempt we make is by querying the maximum size of objects
673 of the variable's type. */
675 HOST_WIDE_INT max_size;
677 gcc_assert (VAR_P (var));
679 max_size = max_int_size_in_bytes (TREE_TYPE (var));
681 gcc_assert (max_size >= 0);
683 DECL_SIZE_UNIT (var)
684 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
685 DECL_SIZE (var)
686 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
689 /* Push the temporary variable TMP into the current binding. */
691 void
692 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
694 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
696 /* Later processing assumes that the object size is constant, which might
697 not be true at this point. Force the use of a constant upper bound in
698 this case. */
699 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
700 force_constant_size (tmp);
702 DECL_CONTEXT (tmp) = fn->decl;
703 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
705 record_vars_into (tmp, fn->decl);
708 /* Push the temporary variable TMP into the current binding. */
710 void
711 gimple_add_tmp_var (tree tmp)
713 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
715 /* Later processing assumes that the object size is constant, which might
716 not be true at this point. Force the use of a constant upper bound in
717 this case. */
718 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
719 force_constant_size (tmp);
721 DECL_CONTEXT (tmp) = current_function_decl;
722 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
724 if (gimplify_ctxp)
726 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
727 gimplify_ctxp->temps = tmp;
729 /* Mark temporaries local within the nearest enclosing parallel. */
730 if (gimplify_omp_ctxp)
732 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
733 while (ctx
734 && (ctx->region_type == ORT_WORKSHARE
735 || ctx->region_type == ORT_SIMD
736 || ctx->region_type == ORT_ACC))
737 ctx = ctx->outer_context;
738 if (ctx)
739 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
742 else if (cfun)
743 record_vars (tmp);
744 else
746 gimple_seq body_seq;
748 /* This case is for nested functions. We need to expose the locals
749 they create. */
750 body_seq = gimple_body (current_function_decl);
751 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

     1. Manual unsharing.  The front-end needs to call unshare_expr on every
        expression that might end up being shared across functions.

     2. Deep unsharing.  This is an extension of regular unsharing.  Instead
        of calling unshare_expr on expressions that might be shared across
        functions, the front-end pre-marks them with TREE_VISITED.  This will
        ensure that they are unshared on the first reference within functions
        when the regular unsharing algorithm runs.  The counterpart is that
        this algorithm must look deeper than for manual unsharing, which is
        specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
793 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
794 These nodes model computations that must be done once. If we were to
795 unshare something like SAVE_EXPR(i++), the gimplification process would
796 create wrong code. However, if DATA is non-null, it must hold a pointer
797 set that is used to unshare the subtrees of these nodes. */
799 static tree
800 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
802 tree t = *tp;
803 enum tree_code code = TREE_CODE (t);
805 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
806 copy their subtrees if we can make sure to do it only once. */
807 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
809 if (data && !((hash_set<tree> *)data)->add (t))
811 else
812 *walk_subtrees = 0;
815 /* Stop at types, decls, constants like copy_tree_r. */
816 else if (TREE_CODE_CLASS (code) == tcc_type
817 || TREE_CODE_CLASS (code) == tcc_declaration
818 || TREE_CODE_CLASS (code) == tcc_constant
819 /* We can't do anything sensible with a BLOCK used as an
820 expression, but we also can't just die when we see it
821 because of non-expression uses. So we avert our eyes
822 and cross our fingers. Silly Java. */
823 || code == BLOCK)
824 *walk_subtrees = 0;
826 /* Cope with the statement expression extension. */
827 else if (code == STATEMENT_LIST)
830 /* Leave the bulk of the work to copy_tree_r itself. */
831 else
832 copy_tree_r (tp, walk_subtrees, NULL);
834 return NULL_TREE;
837 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
838 If *TP has been visited already, then *TP is deeply copied by calling
839 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
841 static tree
842 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
844 tree t = *tp;
845 enum tree_code code = TREE_CODE (t);
847 /* Skip types, decls, and constants. But we do want to look at their
848 types and the bounds of types. Mark them as visited so we properly
849 unmark their subtrees on the unmark pass. If we've already seen them,
850 don't look down further. */
851 if (TREE_CODE_CLASS (code) == tcc_type
852 || TREE_CODE_CLASS (code) == tcc_declaration
853 || TREE_CODE_CLASS (code) == tcc_constant)
855 if (TREE_VISITED (t))
856 *walk_subtrees = 0;
857 else
858 TREE_VISITED (t) = 1;
861 /* If this node has been visited already, unshare it and don't look
862 any deeper. */
863 else if (TREE_VISITED (t))
865 walk_tree (tp, mostly_copy_tree_r, data, NULL);
866 *walk_subtrees = 0;
869 /* Otherwise, mark the node as visited and keep looking. */
870 else
871 TREE_VISITED (t) = 1;
873 return NULL_TREE;
876 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
877 copy_if_shared_r callback unmodified. */
879 static inline void
880 copy_if_shared (tree *tp, void *data)
882 walk_tree (tp, copy_if_shared_r, data, NULL);
885 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
886 any nested functions. */
888 static void
889 unshare_body (tree fndecl)
891 struct cgraph_node *cgn = cgraph_node::get (fndecl);
892 /* If the language requires deep unsharing, we need a pointer set to make
893 sure we don't repeatedly unshare subtrees of unshareable nodes. */
894 hash_set<tree> *visited
895 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
897 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
898 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
899 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
901 delete visited;
903 if (cgn)
904 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
905 unshare_body (cgn->decl);
908 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
909 Subtrees are walked until the first unvisited node is encountered. */
911 static tree
912 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
914 tree t = *tp;
916 /* If this node has been visited, unmark it and keep looking. */
917 if (TREE_VISITED (t))
918 TREE_VISITED (t) = 0;
920 /* Otherwise, don't look any deeper. */
921 else
922 *walk_subtrees = 0;
924 return NULL_TREE;
927 /* Unmark the visited trees rooted at *TP. */
929 static inline void
930 unmark_visited (tree *tp)
932 walk_tree (tp, unmark_visited_r, NULL, NULL);
935 /* Likewise, but mark all trees as not visited. */
937 static void
938 unvisit_body (tree fndecl)
940 struct cgraph_node *cgn = cgraph_node::get (fndecl);
942 unmark_visited (&DECL_SAVED_TREE (fndecl));
943 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
944 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
946 if (cgn)
947 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
948 unvisit_body (cgn->decl);
951 /* Unconditionally make an unshared copy of EXPR. This is used when using
952 stored expressions which span multiple functions, such as BINFO_VTABLE,
953 as the normal unsharing process can't tell that they're shared. */
955 tree
956 unshare_expr (tree expr)
958 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
959 return expr;
962 /* Worker for unshare_expr_without_location. */
964 static tree
965 prune_expr_location (tree *tp, int *walk_subtrees, void *)
967 if (EXPR_P (*tp))
968 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
969 else
970 *walk_subtrees = 0;
971 return NULL_TREE;
974 /* Similar to unshare_expr but also prune all expression locations
975 from EXPR. */
977 tree
978 unshare_expr_without_location (tree expr)
980 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
981 if (EXPR_P (expr))
982 walk_tree (&expr, prune_expr_location, NULL, NULL);
983 return expr;
986 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
987 contain statements and have a value. Assign its value to a temporary
988 and give it void_type_node. Return the temporary, or NULL_TREE if
989 WRAPPER was already void. */
991 tree
992 voidify_wrapper_expr (tree wrapper, tree temp)
994 tree type = TREE_TYPE (wrapper);
995 if (type && !VOID_TYPE_P (type))
997 tree *p;
999 /* Set p to point to the body of the wrapper. Loop until we find
1000 something that isn't a wrapper. */
1001 for (p = &wrapper; p && *p; )
1003 switch (TREE_CODE (*p))
1005 case BIND_EXPR:
1006 TREE_SIDE_EFFECTS (*p) = 1;
1007 TREE_TYPE (*p) = void_type_node;
1008 /* For a BIND_EXPR, the body is operand 1. */
1009 p = &BIND_EXPR_BODY (*p);
1010 break;
1012 case CLEANUP_POINT_EXPR:
1013 case TRY_FINALLY_EXPR:
1014 case TRY_CATCH_EXPR:
1015 TREE_SIDE_EFFECTS (*p) = 1;
1016 TREE_TYPE (*p) = void_type_node;
1017 p = &TREE_OPERAND (*p, 0);
1018 break;
1020 case STATEMENT_LIST:
1022 tree_stmt_iterator i = tsi_last (*p);
1023 TREE_SIDE_EFFECTS (*p) = 1;
1024 TREE_TYPE (*p) = void_type_node;
1025 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1027 break;
1029 case COMPOUND_EXPR:
1030 /* Advance to the last statement. Set all container types to
1031 void. */
1032 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1034 TREE_SIDE_EFFECTS (*p) = 1;
1035 TREE_TYPE (*p) = void_type_node;
1037 break;
1039 case TRANSACTION_EXPR:
1040 TREE_SIDE_EFFECTS (*p) = 1;
1041 TREE_TYPE (*p) = void_type_node;
1042 p = &TRANSACTION_EXPR_BODY (*p);
1043 break;
1045 default:
1046 /* Assume that any tree upon which voidify_wrapper_expr is
1047 directly called is a wrapper, and that its body is op0. */
1048 if (p == &wrapper)
1050 TREE_SIDE_EFFECTS (*p) = 1;
1051 TREE_TYPE (*p) = void_type_node;
1052 p = &TREE_OPERAND (*p, 0);
1053 break;
1055 goto out;
1059 out:
1060 if (p == NULL || IS_EMPTY_STMT (*p))
1061 temp = NULL_TREE;
1062 else if (temp)
1064 /* The wrapper is on the RHS of an assignment that we're pushing
1065 down. */
1066 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1067 || TREE_CODE (temp) == MODIFY_EXPR);
1068 TREE_OPERAND (temp, 1) = *p;
1069 *p = temp;
1071 else
1073 temp = create_tmp_var (type, "retval");
1074 *p = build2 (INIT_EXPR, type, temp, *p);
1077 return temp;
1080 return NULL_TREE;
1083 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1084 a temporary through which they communicate. */
1086 static void
1087 build_stack_save_restore (gcall **save, gcall **restore)
1089 tree tmp_var;
1091 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1092 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1093 gimple_call_set_lhs (*save, tmp_var);
1095 *restore
1096 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1097 1, tmp_var);
1100 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1102 static tree
1103 build_asan_poison_call_expr (tree decl)
1105 /* Do not poison variables that have size equal to zero. */
1106 tree unit_size = DECL_SIZE_UNIT (decl);
1107 if (zerop (unit_size))
1108 return NULL_TREE;
1110 tree base = build_fold_addr_expr (decl);
1112 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1113 void_type_node, 3,
1114 build_int_cst (integer_type_node,
1115 ASAN_MARK_POISON),
1116 base, unit_size);
/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  /* When within an OMP context, do not emit ASAN_MARK internal fns.  */
  if (gimplify_omp_ctxp)
    return;

  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  /* ASAN_MARK (flags, &decl, size-in-bytes).  */
  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
1157 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1158 either poisons or unpoisons a DECL. Created statement is appended
1159 to SEQ_P gimple sequence. */
1161 static void
1162 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1164 gimple_stmt_iterator it = gsi_last (*seq_p);
1165 bool before = false;
1167 if (gsi_end_p (it))
1168 before = true;
1170 asan_poison_variable (decl, poison, &it, before);
1173 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1175 static int
1176 sort_by_decl_uid (const void *a, const void *b)
1178 const tree *t1 = (const tree *)a;
1179 const tree *t2 = (const tree *)b;
1181 int uid1 = DECL_UID (*t1);
1182 int uid2 = DECL_UID (*t2);
1184 if (uid1 < uid2)
1185 return -1;
1186 else if (uid1 > uid2)
1187 return 1;
1188 else
1189 return 0;
1192 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1193 depending on POISON flag. Created statement is appended
1194 to SEQ_P gimple sequence. */
1196 static void
1197 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1199 unsigned c = variables->elements ();
1200 if (c == 0)
1201 return;
1203 auto_vec<tree> sorted_variables (c);
1205 for (hash_set<tree>::iterator it = variables->begin ();
1206 it != variables->end (); ++it)
1207 sorted_variables.safe_push (*it);
1209 sorted_variables.qsort (sort_by_decl_uid);
1211 unsigned i;
1212 tree var;
1213 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1215 asan_poison_variable (var, poison, seq_p);
1217 /* Add use_after_scope_memory attribute for the variable in order
1218 to prevent re-written into SSA. */
1219 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1220 DECL_ATTRIBUTES (var)))
1221 DECL_ATTRIBUTES (var)
1222 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1223 integer_one_node,
1224 DECL_ATTRIBUTES (var));
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  Builds a GIMPLE_BIND
   for the BIND_EXPR's variables, gimplifies the body into it, and — when
   cleanups are needed (stack restore for VLAs/alloca, variable clobbers,
   ASAN unpoisoning, OpenACC declare returns) — wraps the body in a
   GIMPLE_TRY_FINALLY.  Returns GS_OK when the wrapper produced a value
   temporary, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      /* Addressable locals in a SIMD region become private.  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset the flags so we can detect save-stack/keep-stack requests made
     by the body of this bind alone; restored from the old values below.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      /* An empty CONSTRUCTOR marked volatile is GIMPLE's clobber
		 representation: it tells later passes T is dead here.  */
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  /* Accumulate OpenACC "declare return" clauses for variables
	     leaving scope; emitted as one OACC_DECLARE region below.  */
	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      /* Re-poison variables tracked for use-after-scope as they die.  */
      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  /* NOTE(review): the error_mark_node test below is unreachable — the
     same condition already returned GS_ERROR above.  The inner RET also
     shadows the outer declaration; both are harmless but confusing.  */
  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}

      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      /* Cache the temporary so all returns in this function share it.  */
      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
1532 /* Gimplify a variable-length array DECL. */
1534 static void
1535 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1537 /* This is a variable-sized decl. Simplify its size and mark it
1538 for deferred expansion. */
1539 tree t, addr, ptr_type;
1541 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1542 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1544 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1545 if (DECL_HAS_VALUE_EXPR_P (decl))
1546 return;
1548 /* All occurrences of this decl in final gimplified code will be
1549 replaced by indirection. Setting DECL_VALUE_EXPR does two
1550 things: First, it lets the rest of the gimplifier know what
1551 replacement to use. Second, it lets the debug info know
1552 where to find the value. */
1553 ptr_type = build_pointer_type (TREE_TYPE (decl));
1554 addr = create_tmp_var (ptr_type, get_name (decl));
1555 DECL_IGNORED_P (addr) = 0;
1556 t = build_fold_indirect_ref (addr);
1557 TREE_THIS_NOTRAP (t) = 1;
1558 SET_DECL_VALUE_EXPR (decl, t);
1559 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1561 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1562 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1563 size_int (DECL_ALIGN (decl)));
1564 /* The call has been built for a variable-sized object. */
1565 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1566 t = fold_convert (ptr_type, t);
1567 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1569 gimplify_and_add (t, seq_p);
1572 /* A helper function to be called via walk_tree. Mark all labels under *TP
1573 as being forced. To be called for DECL_INITIAL of static variables. */
1575 static tree
1576 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1578 if (TYPE_P (*tp))
1579 *walk_subtrees = 0;
1580 if (TREE_CODE (*tp) == LABEL_DECL)
1582 FORCED_LABEL (*tp) = 1;
1583 cfun->has_forced_label_in_static = 1;
1586 return NULL_TREE;
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  Handles type-size gimplification, VLA
   lowering, ASAN use-after-scope poisoning, and turning DECL_INITIAL
   into an explicit INIT_EXPR for automatic variables.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; nothing replaces it.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Lower as a VLA either when the size is not constant, or when
	 generic stack checking forces large fixed-size locals through
	 the same alloca path.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* Unpoison the variable at its declaration and remember it so the
	 enclosing bind re-poisons it when it goes out of scope.  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && dbg_cnt (asan_use_after_scope))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit assignment; the
		 tree for INIT is dead afterwards, hence the ggc_free.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1678 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1679 and replacing the LOOP_EXPR with goto, but if the loop contains an
1680 EXIT_EXPR, we need to append a label for it to jump to. */
1682 static enum gimplify_status
1683 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1685 tree saved_label = gimplify_ctxp->exit_label;
1686 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1688 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1690 gimplify_ctxp->exit_label = NULL_TREE;
1692 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1694 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1696 if (gimplify_ctxp->exit_label)
1697 gimplify_seq_add_stmt (pre_p,
1698 gimple_build_label (gimplify_ctxp->exit_label));
1700 gimplify_ctxp->exit_label = saved_label;
1702 *expr_p = NULL;
1703 return GS_ALL_DONE;
1706 /* Gimplify a statement list onto a sequence. These may be created either
1707 by an enlightened front-end, or by shortcut_cond_expr. */
1709 static enum gimplify_status
1710 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1712 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1714 tree_stmt_iterator i = tsi_start (*expr_p);
1716 while (!tsi_end_p (i))
1718 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1719 tsi_delink (&i);
1722 if (temp)
1724 *expr_p = temp;
1725 return GS_OK;
1728 return GS_ALL_DONE;
/* Callback for walk_gimple_seq.  Finds the first "real" statement in a
   switch body; stores it in WI->info and stops the walk by returning
   non-NULL.  Scope statements and ASAN_MARK calls are descended into
   rather than reported.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      /* ASAN_MARK instrumentation is not a user statement; keep going.  */
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
1774 /* Possibly warn about unreachable statements between switch's controlling
1775 expression and the first case. SEQ is the body of a switch expression. */
1777 static void
1778 maybe_warn_switch_unreachable (gimple_seq seq)
1780 if (!warn_switch_unreachable
1781 /* This warning doesn't play well with Fortran when optimizations
1782 are on. */
1783 || lang_GNU_Fortran ()
1784 || seq == NULL)
1785 return;
1787 struct walk_stmt_info wi;
1788 memset (&wi, 0, sizeof (wi));
1789 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1790 gimple *stmt = (gimple *) wi.info;
1792 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1794 if (gimple_code (stmt) == GIMPLE_GOTO
1795 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1796 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1797 /* Don't warn for compiler-generated gotos. These occur
1798 in Duff's devices, for example. */;
1799 else
1800 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1801 "statement will never be executed");
/* A label entry that pairs label and a location.  */
struct label_entry
{
  /* The LABEL_DECL that may be fallen through to.  */
  tree label;
  /* Source location to use when warning about the fallthrough.  */
  location_t loc;
};
1813 /* Find LABEL in vector of label entries VEC. */
1815 static struct label_entry *
1816 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1818 unsigned int i;
1819 struct label_entry *l;
1821 FOR_EACH_VEC_ELT (*vec, i, l)
1822 if (l->label == label)
1823 return l;
1824 return NULL;
1827 /* Return true if LABEL, a LABEL_DECL, represents a case label
1828 in a vector of labels CASES. */
1830 static bool
1831 case_label_p (const vec<tree> *cases, tree label)
1833 unsigned int i;
1834 tree l;
1836 FOR_EACH_VEC_ELT (*cases, i, l)
1837 if (CASE_LABEL (l) == label)
1838 return true;
1839 return false;
/* Find the last statement in a scope STMT, descending recursively through
   GIMPLE_BIND bodies and GIMPLE_TRY eval/cleanup sequences.  Returns NULL
   for an empty scope.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* If the eval part can fall through a TRY_FINALLY, the cleanup
	   runs next, so its last statement is the scope's last.
	   NOTE(review): gimple_stmt_may_fallthru is invoked before the
	   LAST_EVAL == NULL test — confirm it tolerates a NULL stmt.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    default:
      return stmt;
    }
}
/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  Advances *GSI_P; stops at
   the end of the sequence or at a label that has a source location (i.e.
   a case or user label).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	  */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      /* ASAN_MARK instrumentation calls are likewise uninteresting.  */
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  return prev;
}
/* Return true if the switch fallthough warning should occur.  LABEL is
   the label statement that we're falling through to.  GSI_P points just
   past the run of labels being fallen into.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      /* Scan past any run of plain labels; warn only if it ends in a
	 case label (i.e. the fallthrough really enters a new case).  */
      tree l;
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
    gsi_next (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
/* Callback for walk_gimple_seq.  Emits -Wimplicit-fallthrough diagnostics
   when a statement sequence starting at a label can reach another located
   label without an intervening FALLTHROUGH marker.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && gimple_has_location (prev))
	      warned_p = warning_at (gimple_location (prev),
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2132 /* Warn when a switch case falls through. */
2134 static void
2135 maybe_warn_implicit_fallthrough (gimple_seq seq)
2137 if (!warn_implicit_fallthrough)
2138 return;
2140 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2141 if (!(lang_GNU_C ()
2142 || lang_GNU_CXX ()
2143 || lang_GNU_OBJC ()))
2144 return;
2146 struct walk_stmt_info wi;
2147 memset (&wi, 0, sizeof (wi));
2148 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
/* Callback for walk_gimple_seq.  Removes IFN_FALLTHROUGH marker calls and
   diagnoses markers that are not immediately followed by a case or
   default label.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* The marker has served its purpose; drop it from the IL.  */
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    return integer_zero_node;

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else
		/* Something other than a label.  That's not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
			"a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2231 /* Expand all FALLTHROUGH () calls in SEQ. */
2233 static void
2234 expand_FALLTHROUGH (gimple_seq *seq_p)
2236 struct walk_stmt_info wi;
2237 memset (&wi, 0, sizeof (wi));
2238 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.

   EXPR_P points to the SWITCH_EXPR; PRE_P receives the generated
   GIMPLE_SWITCH followed by the gimplified body.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  /* The type used for comparing case values; fall back to the type of
     the condition when the SWITCH_EXPR itself is untyped.  */
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      if (TREE_CODE (SWITCH_BODY (switch_expr)) == BIND_EXPR)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* All switch-local vars must have been popped by now.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* A GIMPLE_SWITCH always has a default label; synthesize an empty
	 one placed after the body when the source had none.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
2332 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2334 static enum gimplify_status
2335 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2337 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2338 == current_function_decl);
2340 glabel *label_stmt = gimple_build_label (LABEL_EXPR_LABEL (*expr_p));
2341 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2342 gimplify_seq_add_stmt (pre_p, label_stmt);
2344 return GS_ALL_DONE;
2347 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2349 static enum gimplify_status
2350 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2352 struct gimplify_ctx *ctxp;
2353 glabel *label_stmt;
2355 /* Invalid programs can play Duff's Device type games with, for example,
2356 #pragma omp parallel. At least in the C front end, we don't
2357 detect such invalid branches until after gimplification, in the
2358 diagnose_omp_blocks pass. */
2359 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2360 if (ctxp->case_labels.exists ())
2361 break;
2363 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2364 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2365 ctxp->case_labels.safe_push (*expr_p);
2366 gimplify_seq_add_stmt (pre_p, label_stmt);
2368 return GS_ALL_DONE;
2371 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2372 if necessary. */
2374 tree
2375 build_and_jump (tree *label_p)
2377 if (label_p == NULL)
2378 /* If there's nowhere to jump, just fall through. */
2379 return NULL_TREE;
2381 if (*label_p == NULL_TREE)
2383 tree label = create_artificial_label (UNKNOWN_LOCATION);
2384 *label_p = label;
2387 return build1 (GOTO_EXPR, void_type_node, *label_p);
2390 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2391 This also involves building a label to jump to and communicating it to
2392 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2394 static enum gimplify_status
2395 gimplify_exit_expr (tree *expr_p)
2397 tree cond = TREE_OPERAND (*expr_p, 0);
2398 tree expr;
2400 expr = build_and_jump (&gimplify_ctxp->exit_label);
2401 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2402 *expr_p = expr;
2404 return GS_OK;
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral types ask get_unwidened for the narrowest type the
     reference can be read in; otherwise use the field's own type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
/* Nonlocal VLAs seen in the current function.  Used by
   gimplify_var_or_parm_decl to create at most one debug decl per VLA.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (VAR_P (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip contexts that don't get their own copy of the decl.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns true if already present, so the copy
	     is made at most once per decl.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T, from T's
   own volatility and the TREE_SIDE_EFFECTS flags of its operands.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Start from the volatility of T itself, then OR in any operand
	 with side effects.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the old value so it can be the expression's result.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart; negate the offset
	 instead.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2987 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2989 static void
2990 maybe_with_size_expr (tree *expr_p)
2992 tree expr = *expr_p;
2993 tree type = TREE_TYPE (expr);
2994 tree size;
2996 /* If we've already wrapped this or the type is error_mark_node, we can't do
2997 anything. */
2998 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2999 || type == error_mark_node)
3000 return;
3002 /* If the size isn't known or is a constant, we have nothing to do. */
3003 size = TYPE_SIZE_UNIT (type);
3004 if (!size || TREE_CODE (size) == INTEGER_CST)
3005 return;
3007 /* Otherwise, make a WITH_SIZE_EXPR. */
3008 size = unshare_expr (size);
3009 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3010 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
	      bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
3060 /* Don't fold inside offloading or taskreg regions: it can break code by
3061 adding decl references that weren't in the source. We'll do it during
3062 omplower pass instead. */
3064 static bool
3065 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3067 struct gimplify_omp_ctx *ctx;
3068 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3069 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3070 return false;
3071 return fold_stmt (gsi);
3074 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3075 WANT_VALUE is true if the result of the call is desired. */
3077 static enum gimplify_status
3078 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3080 tree fndecl, parms, p, fnptrtype;
3081 enum gimplify_status ret;
3082 int i, nargs;
3083 gcall *call;
3084 bool builtin_va_start_p = false;
3085 location_t loc = EXPR_LOCATION (*expr_p);
3087 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3089 /* For reliable diagnostics during inlining, it is necessary that
3090 every call_expr be annotated with file and line. */
3091 if (! EXPR_HAS_LOCATION (*expr_p))
3092 SET_EXPR_LOCATION (*expr_p, input_location);
3094 /* Gimplify internal functions created in the FEs. */
3095 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3097 if (want_value)
3098 return GS_ALL_DONE;
3100 nargs = call_expr_nargs (*expr_p);
3101 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3102 auto_vec<tree> vargs (nargs);
3104 for (i = 0; i < nargs; i++)
3106 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3107 EXPR_LOCATION (*expr_p));
3108 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3110 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
3111 gimplify_seq_add_stmt (pre_p, call);
3112 return GS_ALL_DONE;
3115 /* This may be a call to a builtin function.
3117 Builtin function calls may be transformed into different
3118 (and more efficient) builtin function calls under certain
3119 circumstances. Unfortunately, gimplification can muck things
3120 up enough that the builtin expanders are not aware that certain
3121 transformations are still valid.
3123 So we attempt transformation/gimplification of the call before
3124 we gimplify the CALL_EXPR. At this time we do not manage to
3125 transform all calls in the same manner as the expanders do, but
3126 we do transform most of them. */
3127 fndecl = get_callee_fndecl (*expr_p);
3128 if (fndecl
3129 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3130 switch (DECL_FUNCTION_CODE (fndecl))
3132 case BUILT_IN_ALLOCA:
3133 case BUILT_IN_ALLOCA_WITH_ALIGN:
3134 /* If the call has been built for a variable-sized object, then we
3135 want to restore the stack level when the enclosing BIND_EXPR is
3136 exited to reclaim the allocated space; otherwise, we precisely
3137 need to do the opposite and preserve the latest stack level. */
3138 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3139 gimplify_ctxp->save_stack = true;
3140 else
3141 gimplify_ctxp->keep_stack = true;
3142 break;
3144 case BUILT_IN_VA_START:
3146 builtin_va_start_p = TRUE;
3147 if (call_expr_nargs (*expr_p) < 2)
3149 error ("too few arguments to function %<va_start%>");
3150 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3151 return GS_OK;
3154 if (fold_builtin_next_arg (*expr_p, true))
3156 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3157 return GS_OK;
3159 break;
3162 default:
3165 if (fndecl && DECL_BUILT_IN (fndecl))
3167 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3168 if (new_tree && new_tree != *expr_p)
3170 /* There was a transformation of this call which computes the
3171 same value, but in a more efficient way. Return and try
3172 again. */
3173 *expr_p = new_tree;
3174 return GS_OK;
3178 /* Remember the original function pointer type. */
3179 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3181 /* There is a sequence point before the call, so any side effects in
3182 the calling expression must occur before the actual call. Force
3183 gimplify_expr to use an internal post queue. */
3184 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3185 is_gimple_call_addr, fb_rvalue);
3187 nargs = call_expr_nargs (*expr_p);
3189 /* Get argument types for verification. */
3190 fndecl = get_callee_fndecl (*expr_p);
3191 parms = NULL_TREE;
3192 if (fndecl)
3193 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3194 else
3195 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3197 if (fndecl && DECL_ARGUMENTS (fndecl))
3198 p = DECL_ARGUMENTS (fndecl);
3199 else if (parms)
3200 p = parms;
3201 else
3202 p = NULL_TREE;
3203 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3206 /* If the last argument is __builtin_va_arg_pack () and it is not
3207 passed as a named argument, decrease the number of CALL_EXPR
3208 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3209 if (!p
3210 && i < nargs
3211 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3213 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3214 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3216 if (last_arg_fndecl
3217 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3218 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3219 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3221 tree call = *expr_p;
3223 --nargs;
3224 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3225 CALL_EXPR_FN (call),
3226 nargs, CALL_EXPR_ARGP (call));
3228 /* Copy all CALL_EXPR flags, location and block, except
3229 CALL_EXPR_VA_ARG_PACK flag. */
3230 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3231 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3232 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3233 = CALL_EXPR_RETURN_SLOT_OPT (call);
3234 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3235 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3237 /* Set CALL_EXPR_VA_ARG_PACK. */
3238 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3242 /* If the call returns twice then after building the CFG the call
3243 argument computations will no longer dominate the call because
3244 we add an abnormal incoming edge to the call. So do not use SSA
3245 vars there. */
3246 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3248 /* Gimplify the function arguments. */
3249 if (nargs > 0)
3251 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3252 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3253 PUSH_ARGS_REVERSED ? i-- : i++)
3255 enum gimplify_status t;
3257 /* Avoid gimplifying the second argument to va_start, which needs to
3258 be the plain PARM_DECL. */
3259 if ((i != 1) || !builtin_va_start_p)
3261 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3262 EXPR_LOCATION (*expr_p), ! returns_twice);
3264 if (t == GS_ERROR)
3265 ret = GS_ERROR;
3270 /* Gimplify the static chain. */
3271 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3273 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3274 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3275 else
3277 enum gimplify_status t;
3278 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3279 EXPR_LOCATION (*expr_p), ! returns_twice);
3280 if (t == GS_ERROR)
3281 ret = GS_ERROR;
3285 /* Verify the function result. */
3286 if (want_value && fndecl
3287 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3289 error_at (loc, "using result of function returning %<void%>");
3290 ret = GS_ERROR;
3293 /* Try this again in case gimplification exposed something. */
3294 if (ret != GS_ERROR)
3296 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3298 if (new_tree && new_tree != *expr_p)
3300 /* There was a transformation of this call which computes the
3301 same value, but in a more efficient way. Return and try
3302 again. */
3303 *expr_p = new_tree;
3304 return GS_OK;
3307 else
3309 *expr_p = error_mark_node;
3310 return GS_ERROR;
3313 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3314 decl. This allows us to eliminate redundant or useless
3315 calls to "const" functions. */
3316 if (TREE_CODE (*expr_p) == CALL_EXPR)
3318 int flags = call_expr_flags (*expr_p);
3319 if (flags & (ECF_CONST | ECF_PURE)
3320 /* An infinite loop is considered a side effect. */
3321 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3322 TREE_SIDE_EFFECTS (*expr_p) = 0;
3325 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3326 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3327 form and delegate the creation of a GIMPLE_CALL to
3328 gimplify_modify_expr. This is always possible because when
3329 WANT_VALUE is true, the caller wants the result of this call into
3330 a temporary, which means that we will emit an INIT_EXPR in
3331 internal_get_tmp_var which will then be handled by
3332 gimplify_modify_expr. */
3333 if (!want_value)
3335 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3336 have to do is replicate it as a GIMPLE_CALL tuple. */
3337 gimple_stmt_iterator gsi;
3338 call = gimple_build_call_from_tree (*expr_p);
3339 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
3340 notice_special_calls (call);
3341 gimplify_seq_add_stmt (pre_p, call);
3342 gsi = gsi_last (*pre_p);
3343 maybe_fold_stmt (&gsi);
3344 *expr_p = NULL_TREE;
3346 else
3347 /* Remember the original function type. */
3348 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3349 CALL_EXPR_FN (*expr_p));
3351 return ret;
3354 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3355 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3357 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3358 condition is true or false, respectively. If null, we should generate
3359 our own to skip over the evaluation of this specific expression.
3361 LOCUS is the source location of the COND_EXPR.
3363 This function is the tree equivalent of do_jump.
3365 shortcut_cond_r should only be called by shortcut_cond_expr. */
3367 static tree
3368 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3369 location_t locus)
3371 tree local_label = NULL_TREE;
3372 tree t, expr = NULL;
/* LOCAL_LABEL is created lazily when the recursion needs a label the
   caller did not supply; if so, it is emitted at the end of EXPR.  */
3374 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3375 retain the shortcut semantics. Just insert the gotos here;
3376 shortcut_cond_expr will append the real blocks later. */
3377 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3379 location_t new_locus;
3381 /* Turn if (a && b) into
3383 if (a); else goto no;
3384 if (b) goto yes; else goto no;
3385 (no:) */
3387 if (false_label_p == NULL)
3388 false_label_p = &local_label;
3390 /* Keep the original source location on the first 'if'. */
3391 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3392 append_to_statement_list (t, &expr);
3394 /* Set the source location of the && on the second 'if'. */
3395 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3396 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3397 new_locus);
3398 append_to_statement_list (t, &expr);
3400 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3402 location_t new_locus;
3404 /* Turn if (a || b) into
3406 if (a) goto yes;
3407 if (b) goto yes; else goto no;
3408 (yes:) */
3410 if (true_label_p == NULL)
3411 true_label_p = &local_label;
3413 /* Keep the original source location on the first 'if'. */
3414 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3415 append_to_statement_list (t, &expr);
3417 /* Set the source location of the || on the second 'if'. */
3418 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3419 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3420 new_locus);
3421 append_to_statement_list (t, &expr);
3423 else if (TREE_CODE (pred) == COND_EXPR
3424 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3425 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3427 location_t new_locus;
3429 /* As long as we're messing with gotos, turn if (a ? b : c) into
3430 if (a)
3431 if (b) goto yes; else goto no;
3432 else
3433 if (c) goto yes; else goto no;
3435 Don't do this if one of the arms has void type, which can happen
3436 in C++ when the arm is throw. */
3438 /* Keep the original source location on the first 'if'. Set the source
3439 location of the ? on the second 'if'. */
3440 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3441 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3442 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3443 false_label_p, locus),
3444 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3445 false_label_p, new_locus));
/* Base case: PRED is a simple condition, so emit a single COND_EXPR
   whose arms are plain jumps to the requested labels.  */
3447 else
3449 expr = build3 (COND_EXPR, void_type_node, pred,
3450 build_and_jump (true_label_p),
3451 build_and_jump (false_label_p));
3452 SET_EXPR_LOCATION (expr, locus);
/* Emit the lazily-created local label, if any, after the jumps that
   target it.  */
3455 if (local_label)
3457 t = build1 (LABEL_EXPR, void_type_node, local_label);
3458 append_to_statement_list (t, &expr);
3461 return expr;
3464 /* Given a conditional expression EXPR with short-circuit boolean
3465 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3466 predicate apart into the equivalent sequence of conditionals. */
3468 static tree
3469 shortcut_cond_expr (tree expr)
3471 tree pred = TREE_OPERAND (expr, 0);
3472 tree then_ = TREE_OPERAND (expr, 1);
3473 tree else_ = TREE_OPERAND (expr, 2);
3474 tree true_label, false_label, end_label, t;
3475 tree *true_label_p;
3476 tree *false_label_p;
3477 bool emit_end, emit_false, jump_over_else;
3478 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3479 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
/* THEN_SE/ELSE_SE track whether each arm contains real work; an arm
   without side effects can be dropped in favor of plain jumps.  */
3481 /* First do simple transformations. */
3482 if (!else_se)
3484 /* If there is no 'else', turn
3485 if (a && b) then c
3486 into
3487 if (a) if (b) then c. */
3488 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3490 /* Keep the original source location on the first 'if'. */
3491 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3492 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3493 /* Set the source location of the && on the second 'if'. */
3494 if (EXPR_HAS_LOCATION (pred))
3495 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3496 then_ = shortcut_cond_expr (expr);
3497 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3498 pred = TREE_OPERAND (pred, 0);
3499 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3500 SET_EXPR_LOCATION (expr, locus);
3504 if (!then_se)
3506 /* If there is no 'then', turn
3507 if (a || b); else d
3508 into
3509 if (a); else if (b); else d. */
3510 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3512 /* Keep the original source location on the first 'if'. */
3513 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3514 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3515 /* Set the source location of the || on the second 'if'. */
3516 if (EXPR_HAS_LOCATION (pred))
3517 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3518 else_ = shortcut_cond_expr (expr);
3519 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3520 pred = TREE_OPERAND (pred, 0);
3521 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3522 SET_EXPR_LOCATION (expr, locus);
3526 /* If we're done, great. */
3527 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3528 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3529 return expr;
3531 /* Otherwise we need to mess with gotos. Change
3532 if (a) c; else d;
3534 if (a); else goto no;
3535 c; goto end;
3536 no: d; end:
3537 and recursively gimplify the condition. */
3539 true_label = false_label = end_label = NULL_TREE;
3541 /* If our arms just jump somewhere, hijack those labels so we don't
3542 generate jumps to jumps. */
3544 if (then_
3545 && TREE_CODE (then_) == GOTO_EXPR
3546 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3548 true_label = GOTO_DESTINATION (then_);
3549 then_ = NULL;
3550 then_se = false;
3553 if (else_
3554 && TREE_CODE (else_) == GOTO_EXPR
3555 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3557 false_label = GOTO_DESTINATION (else_);
3558 else_ = NULL;
3559 else_se = false;
3562 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3563 if (true_label)
3564 true_label_p = &true_label;
3565 else
3566 true_label_p = NULL;
3568 /* The 'else' branch also needs a label if it contains interesting code. */
3569 if (false_label || else_se)
3570 false_label_p = &false_label;
3571 else
3572 false_label_p = NULL;
3574 /* If there was nothing else in our arms, just forward the label(s). */
3575 if (!then_se && !else_se)
3576 return shortcut_cond_r (pred, true_label_p, false_label_p,
3577 EXPR_LOC_OR_LOC (expr, input_location));
3579 /* If our last subexpression already has a terminal label, reuse it. */
3580 if (else_se)
3581 t = expr_last (else_);
3582 else if (then_se)
3583 t = expr_last (then_);
3584 else
3585 t = NULL;
3586 if (t && TREE_CODE (t) == LABEL_EXPR)
3587 end_label = LABEL_EXPR_LABEL (t);
3589 /* If we don't care about jumping to the 'else' branch, jump to the end
3590 if the condition is false. */
3591 if (!false_label_p)
3592 false_label_p = &end_label;
3594 /* We only want to emit these labels if we aren't hijacking them. */
3595 emit_end = (end_label == NULL_TREE);
3596 emit_false = (false_label == NULL_TREE);
3598 /* We only emit the jump over the else clause if we have to--if the
3599 then clause may fall through. Otherwise we can wind up with a
3600 useless jump and a useless label at the end of gimplified code,
3601 which will cause us to think that this conditional as a whole
3602 falls through even if it doesn't. If we then inline a function
3603 which ends with such a condition, that can cause us to issue an
3604 inappropriate warning about control reaching the end of a
3605 non-void function. */
3606 jump_over_else = block_may_fallthru (then_);
3608 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3609 EXPR_LOC_OR_LOC (expr, input_location));
/* Assemble the lowered form: condition jumps, then-arm, optional jump
   over the else, else-arm, and any labels this function owns.  */
3611 expr = NULL;
3612 append_to_statement_list (pred, &expr);
3614 append_to_statement_list (then_, &expr);
3615 if (else_se)
3617 if (jump_over_else)
3619 tree last = expr_last (expr);
3620 t = build_and_jump (&end_label);
3621 if (EXPR_HAS_LOCATION (last))
3622 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3623 append_to_statement_list (t, &expr);
3625 if (emit_false)
3627 t = build1 (LABEL_EXPR, void_type_node, false_label);
3628 append_to_statement_list (t, &expr);
3630 append_to_statement_list (else_, &expr);
3632 if (emit_end && end_label)
3634 t = build1 (LABEL_EXPR, void_type_node, end_label);
3635 append_to_statement_list (t, &expr);
3638 return expr;
3641 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3643 tree
3644 gimple_boolify (tree expr)
3646 tree type = TREE_TYPE (expr);
3647 location_t loc = EXPR_LOCATION (expr);
3649 if (TREE_CODE (expr) == NE_EXPR
3650 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3651 && integer_zerop (TREE_OPERAND (expr, 1)))
3653 tree call = TREE_OPERAND (expr, 0);
3654 tree fn = get_callee_fndecl (call);
3656 /* For __builtin_expect ((long) (x), y) recurse into x as well
3657 if x is truth_value_p. */
3658 if (fn
3659 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3660 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3661 && call_expr_nargs (call) == 2)
3663 tree arg = CALL_EXPR_ARG (call, 0);
3664 if (arg)
3666 if (TREE_CODE (arg) == NOP_EXPR
3667 && TREE_TYPE (arg) == TREE_TYPE (call))
3668 arg = TREE_OPERAND (arg, 0);
3669 if (truth_value_p (TREE_CODE (arg)))
3671 arg = gimple_boolify (arg);
3672 CALL_EXPR_ARG (call, 0)
3673 = fold_convert_loc (loc, TREE_TYPE (call), arg);
/* Dispatch on the expression code: truth operators and comparisons are
   re-typed in place, everything else is converted to boolean_type_node.  */
3679 switch (TREE_CODE (expr))
3681 case TRUTH_AND_EXPR:
3682 case TRUTH_OR_EXPR:
3683 case TRUTH_XOR_EXPR:
3684 case TRUTH_ANDIF_EXPR:
3685 case TRUTH_ORIF_EXPR:
3686 /* Also boolify the arguments of truth exprs. */
3687 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3688 /* FALLTHRU */
3690 case TRUTH_NOT_EXPR:
3691 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3693 /* These expressions always produce boolean results. */
3694 if (TREE_CODE (type) != BOOLEAN_TYPE)
3695 TREE_TYPE (expr) = boolean_type_node;
3696 return expr;
3698 case ANNOTATE_EXPR:
3699 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3701 case annot_expr_ivdep_kind:
3702 case annot_expr_no_vector_kind:
3703 case annot_expr_vector_kind:
3704 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3705 if (TREE_CODE (type) != BOOLEAN_TYPE)
3706 TREE_TYPE (expr) = boolean_type_node;
3707 return expr;
3708 default:
3709 gcc_unreachable ();
3712 default:
3713 if (COMPARISON_CLASS_P (expr))
3715 /* These expressions always produce boolean results. */
3716 if (TREE_CODE (type) != BOOLEAN_TYPE)
3717 TREE_TYPE (expr) = boolean_type_node;
3718 return expr;
3720 /* Other expressions that get here must have boolean values, but
3721 might need to be converted to the appropriate mode. */
3722 if (TREE_CODE (type) == BOOLEAN_TYPE)
3723 return expr;
3724 return fold_convert_loc (loc, boolean_type_node, expr);
3728 /* Given a conditional expression *EXPR_P without side effects, gimplify
3729 its operands. New statements are inserted to PRE_P. */
3731 static enum gimplify_status
3732 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3734 tree expr = *expr_p, cond;
3735 enum gimplify_status ret, tret;
3736 enum tree_code code;
3738 cond = gimple_boolify (COND_EXPR_COND (expr));
3740 /* We need to handle && and || specially, as their gimplification
3741 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3742 code = TREE_CODE (cond);
3743 if (code == TRUTH_ANDIF_EXPR)
3744 TREE_SET_CODE (cond, TRUTH_AND_EXPR)
3745 else if (code == TRUTH_ORIF_EXPR)
3746 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3747 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3748 COND_EXPR_COND (*expr_p) = cond;
/* The arms are known side-effect free (checked by the caller), so they
   can be gimplified unconditionally to plain values.  */
3750 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3751 is_gimple_val, fb_rvalue);
3752 ret = MIN (ret, tret);
3753 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3754 is_gimple_val, fb_rvalue);
3756 return MIN (ret, tret);
3759 /* Return true if evaluating EXPR could trap.
3760 EXPR is GENERIC, while tree_could_trap_p can be called
3761 only on GIMPLE. */
3763 static bool
3764 generic_expr_could_trap_p (tree expr)
3766 unsigned i, n;
3768 if (!expr || is_gimple_val (expr))
3769 return false;
3771 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3772 return true;
3774 n = TREE_OPERAND_LENGTH (expr);
3775 for (i = 0; i < n; i++)
3776 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3777 return true;
3779 return false;
3782 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3783 into
3785 if (p) if (p)
3786 t1 = a; a;
3787 else or else
3788 t1 = b; b;
3791 The second form is used when *EXPR_P is of type void.
3793 PRE_P points to the list where side effects that must happen before
3794 *EXPR_P should be stored. */
3796 static enum gimplify_status
3797 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3799 tree expr = *expr_p;
3800 tree type = TREE_TYPE (expr);
3801 location_t loc = EXPR_LOCATION (expr);
3802 tree tmp, arm1, arm2;
3803 enum gimplify_status ret;
3804 tree label_true, label_false, label_cont;
3805 bool have_then_clause_p, have_else_clause_p;
3806 gcond *cond_stmt;
3807 enum tree_code pred_code;
3808 gimple_seq seq = NULL;
/* SEQ collects the lowered statements; it is flushed to PRE_P after the
   conditional context is popped.  */
3810 /* If this COND_EXPR has a value, copy the values into a temporary within
3811 the arms. */
3812 if (!VOID_TYPE_P (type))
3814 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3815 tree result;
3817 /* If either an rvalue is ok or we do not require an lvalue, create the
3818 temporary. But we cannot do that if the type is addressable. */
3819 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3820 && !TREE_ADDRESSABLE (type))
3822 if (gimplify_ctxp->allow_rhs_cond_expr
3823 /* If either branch has side effects or could trap, it can't be
3824 evaluated unconditionally. */
3825 && !TREE_SIDE_EFFECTS (then_)
3826 && !generic_expr_could_trap_p (then_)
3827 && !TREE_SIDE_EFFECTS (else_)
3828 && !generic_expr_could_trap_p (else_))
3829 return gimplify_pure_cond_expr (expr_p, pre_p);
3831 tmp = create_tmp_var (type, "iftmp");
3832 result = tmp;
3835 /* Otherwise, only create and copy references to the values. */
3836 else
3838 type = build_pointer_type (type);
3840 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3841 then_ = build_fold_addr_expr_loc (loc, then_);
3843 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3844 else_ = build_fold_addr_expr_loc (loc, else_);
3846 expr
3847 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3849 tmp = create_tmp_var (type, "iftmp");
3850 result = build_simple_mem_ref_loc (loc, tmp);
3853 /* Build the new then clause, `tmp = then_;'. But don't build the
3854 assignment if the value is void; in C++ it can be if it's a throw. */
3855 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3856 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3858 /* Similarly, build the new else clause, `tmp = else_;'. */
3859 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3860 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3862 TREE_TYPE (expr) = void_type_node;
3863 recalculate_side_effects (expr);
3865 /* Move the COND_EXPR to the prequeue. */
3866 gimplify_stmt (&expr, pre_p);
3868 *expr_p = result;
3869 return GS_ALL_DONE;
3872 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3873 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3874 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3875 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3877 /* Make sure the condition has BOOLEAN_TYPE. */
3878 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3880 /* Break apart && and || conditions. */
3881 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3882 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3884 expr = shortcut_cond_expr (expr);
3886 if (expr != *expr_p)
3888 *expr_p = expr;
3890 /* We can't rely on gimplify_expr to re-gimplify the expanded
3891 form properly, as cleanups might cause the target labels to be
3892 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3893 set up a conditional context. */
3894 gimple_push_condition ();
3895 gimplify_stmt (expr_p, &seq);
3896 gimple_pop_condition (pre_p);
3897 gimple_seq_add_seq (pre_p, seq);
3899 return GS_ALL_DONE;
3903 /* Now do the normal gimplification. */
3905 /* Gimplify condition. */
3906 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3907 fb_rvalue);
3908 if (ret == GS_ERROR)
3909 return GS_ERROR;
3910 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3912 gimple_push_condition ();
3914 have_then_clause_p = have_else_clause_p = false;
/* If an arm is just 'goto label', reuse that label directly instead of
   creating an artificial one; this avoids emitting jumps to jumps.  */
3915 if (TREE_OPERAND (expr, 1) != NULL
3916 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3917 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3918 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3919 == current_function_decl)
3920 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3921 have different locations, otherwise we end up with incorrect
3922 location information on the branches. */
3923 && (optimize
3924 || !EXPR_HAS_LOCATION (expr)
3925 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3926 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3928 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3929 have_then_clause_p = true;
3931 else
3932 label_true = create_artificial_label (UNKNOWN_LOCATION);
3933 if (TREE_OPERAND (expr, 2) != NULL
3934 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3935 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3936 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3937 == current_function_decl)
3938 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3939 have different locations, otherwise we end up with incorrect
3940 location information on the branches. */
3941 && (optimize
3942 || !EXPR_HAS_LOCATION (expr)
3943 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3944 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3946 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3947 have_else_clause_p = true;
3949 else
3950 label_false = create_artificial_label (UNKNOWN_LOCATION);
/* Lower the condition to a GIMPLE_COND with explicit branch labels.  */
3952 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3953 &arm2);
3954 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3955 label_false);
3956 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
3957 gimplify_seq_add_stmt (&seq, cond_stmt);
3958 gimple_stmt_iterator gsi = gsi_last (seq);
3959 maybe_fold_stmt (&gsi);
3961 label_cont = NULL_TREE;
3962 if (!have_then_clause_p)
3964 /* For if (...) {} else { code; } put label_true after
3965 the else block. */
3966 if (TREE_OPERAND (expr, 1) == NULL_TREE
3967 && !have_else_clause_p
3968 && TREE_OPERAND (expr, 2) != NULL_TREE)
3969 label_cont = label_true;
3970 else
3972 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3973 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3974 /* For if (...) { code; } else {} or
3975 if (...) { code; } else goto label; or
3976 if (...) { code; return; } else { ... }
3977 label_cont isn't needed. */
3978 if (!have_else_clause_p
3979 && TREE_OPERAND (expr, 2) != NULL_TREE
3980 && gimple_seq_may_fallthru (seq))
3982 gimple *g;
3983 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3985 g = gimple_build_goto (label_cont);
3987 /* GIMPLE_COND's are very low level; they have embedded
3988 gotos. This particular embedded goto should not be marked
3989 with the location of the original COND_EXPR, as it would
3990 correspond to the COND_EXPR's condition, not the ELSE or the
3991 THEN arms. To avoid marking it with the wrong location, flag
3992 it as "no location". */
3993 gimple_set_do_not_emit_location (g);
3995 gimplify_seq_add_stmt (&seq, g);
3999 if (!have_else_clause_p)
4001 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4002 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4004 if (label_cont)
4005 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4007 gimple_pop_condition (pre_p);
4008 gimple_seq_add_seq (pre_p, seq);
4010 if (ret == GS_ERROR)
4011 ; /* Do nothing. */
4012 else if (have_then_clause_p || have_else_clause_p)
4013 ret = GS_ALL_DONE;
4014 else
4016 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4017 expr = TREE_OPERAND (expr, 0);
4018 gimplify_stmt (&expr, pre_p);
4021 *expr_p = NULL;
4022 return ret;
4025 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4026 to be marked addressable.
4028 We cannot rely on such an expression being directly markable if a temporary
4029 has been created by the gimplification. In this case, we create another
4030 temporary and initialize it with a copy, which will become a store after we
4031 mark it addressable. This can happen if the front-end passed us something
4032 that it could not mark addressable yet, like a Fortran pass-by-reference
4033 parameter (int) floatvar. */
4035 static void
4036 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4038 while (handled_component_p (*expr_p))
4039 expr_p = &TREE_OPERAND (*expr_p, 0);
4040 if (is_gimple_reg (*expr_p))
4042 /* Do not allow an SSA name as the temporary. */
4043 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4044 DECL_GIMPLE_REG_P (var) = 0;
4045 *expr_p = var;
4049 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4050 a call to __builtin_memcpy. */
4052 static enum gimplify_status
4053 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4054 gimple_seq *seq_p)
4056 tree t, to, to_ptr, from, from_ptr;
4057 gcall *gs;
4058 location_t loc = EXPR_LOCATION (*expr_p);
4060 to = TREE_OPERAND (*expr_p, 0);
4061 from = TREE_OPERAND (*expr_p, 1);
4063 /* Mark the RHS addressable. Beware that it may not be possible to do so
4064 directly if a temporary has been created by the gimplification. */
4065 prepare_gimple_addressable (&from, seq_p);
/* Take addresses of source and destination and gimplify each address
   (source first) into SEQ_P.  */
4067 mark_addressable (from);
4068 from_ptr = build_fold_addr_expr_loc (loc, from);
4069 gimplify_arg (&from_ptr, seq_p, loc);
4071 mark_addressable (to);
4072 to_ptr = build_fold_addr_expr_loc (loc, to);
4073 gimplify_arg (&to_ptr, seq_p, loc);
4075 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
/* Build memcpy (to, from, size).  */
4077 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4079 if (want_value)
4081 /* tmp = memcpy() */
4082 t = create_tmp_var (TREE_TYPE (to_ptr));
4083 gimple_call_set_lhs (gs, t);
4084 gimplify_seq_add_stmt (seq_p, gs);
4086 *expr_p = build_simple_mem_ref (t);
4087 return GS_ALL_DONE;
4090 gimplify_seq_add_stmt (seq_p, gs);
4091 *expr_p = NULL;
4092 return GS_ALL_DONE;
4095 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4096 a call to __builtin_memset. In this case we know that the RHS is
4097 a CONSTRUCTOR with an empty element list. */
4099 static enum gimplify_status
4100 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4101 gimple_seq *seq_p)
4103 tree t, from, to, to_ptr;
4104 gcall *gs;
4105 location_t loc = EXPR_LOCATION (*expr_p);
4107 /* Assert our assumptions, to abort instead of producing wrong code
4108 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4109 not be immediately exposed. */
4110 from = TREE_OPERAND (*expr_p, 1);
4111 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4112 from = TREE_OPERAND (from, 0);
4114 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4115 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4117 /* Now proceed. */
4118 to = TREE_OPERAND (*expr_p, 0);
4120 to_ptr = build_fold_addr_expr_loc (loc, to);
4121 gimplify_arg (&to_ptr, seq_p, loc);
4122 t = builtin_decl_implicit (BUILT_IN_MEMSET);
/* Build memset (to, 0, size): the empty CONSTRUCTOR means the whole
   object is zero-initialized.  */
4124 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4126 if (want_value)
4128 /* tmp = memset() */
4129 t = create_tmp_var (TREE_TYPE (to_ptr));
4130 gimple_call_set_lhs (gs, t);
4131 gimplify_seq_add_stmt (seq_p, gs);
4133 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4134 return GS_ALL_DONE;
4137 gimplify_seq_add_stmt (seq_p, gs);
4138 *expr_p = NULL;
4139 return GS_ALL_DONE;
4142 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4143 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4144 assignment. Return non-null if we detect a potential overlap. */
/* Data shared between gimplify_init_ctor_preeval and its walk_tree
   callback gimplify_init_ctor_preeval_1.  */
4146 struct gimplify_init_ctor_preeval_data
4148 /* The base decl of the lhs object. May be NULL, in which case we
4149 have to assume the lhs is indirect. */
4150 tree lhs_base_decl;
4152 /* The alias set of the lhs object. */
4153 alias_set_type lhs_alias_set;
/* walk_tree callback for gimplify_init_ctor_preeval: return non-null
   (stopping the walk) if *TP might overlap the LHS described by XDATA,
   a gimplify_init_ctor_preeval_data.  */
4156 static tree
4157 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4159 struct gimplify_init_ctor_preeval_data *data
4160 = (struct gimplify_init_ctor_preeval_data *) xdata;
4161 tree t = *tp;
4163 /* If we find the base object, obviously we have overlap. */
4164 if (data->lhs_base_decl == t)
4165 return t;
4167 /* If the constructor component is indirect, determine if we have a
4168 potential overlap with the lhs. The only bits of information we
4169 have to go on at this point are addressability and alias sets. */
4170 if ((INDIRECT_REF_P (t)
4171 || TREE_CODE (t) == MEM_REF)
4172 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4173 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4174 return t;
4176 /* If the constructor component is a call, determine if it can hide a
4177 potential overlap with the lhs through an INDIRECT_REF like above.
4178 ??? Ugh - this is completely broken. In fact this whole analysis
4179 doesn't look conservative. */
4180 if (TREE_CODE (t) == CALL_EXPR)
4182 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4184 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4185 if (POINTER_TYPE_P (TREE_VALUE (type))
4186 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4187 && alias_sets_conflict_p (data->lhs_alias_set,
4188 get_alias_set
4189 (TREE_TYPE (TREE_VALUE (type)))))
4190 return t;
/* Types and declarations contain no further references to walk.  */
4193 if (IS_TYPE_OR_DECL_P (t))
4194 *walk_subtrees = 0;
4195 return NULL;
4198 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4199 force values that overlap with the lhs (as described by *DATA)
4200 into temporaries. */
4202 static void
4203 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4204 struct gimplify_init_ctor_preeval_data *data)
4206 enum gimplify_status one;
4208 /* If the value is constant, then there's nothing to pre-evaluate. */
4209 if (TREE_CONSTANT (*expr_p))
4211 /* Ensure it does not have side effects, it might contain a reference to
4212 the object we're initializing. */
4213 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4214 return;
4217 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4218 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4219 return;
4221 /* Recurse for nested constructors. */
4222 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4224 unsigned HOST_WIDE_INT ix;
4225 constructor_elt *ce;
4226 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4228 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4229 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4231 return;
4234 /* If this is a variable sized type, we must remember the size. */
4235 maybe_with_size_expr (expr_p);
4237 /* Gimplify the constructor element to something appropriate for the rhs
4238 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4239 the gimplifier will consider this a store to memory. Doing this
4240 gimplification now means that we won't have to deal with complicated
4241 language-specific trees, nor trees like SAVE_EXPR that can induce
4242 exponential search behavior. */
4243 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4244 if (one == GS_ERROR)
4246 *expr_p = NULL;
4247 return;
4250 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4251 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4252 always be true for all scalars, since is_gimple_mem_rhs insists on a
4253 temporary variable for them. */
4254 if (DECL_P (*expr_p))
4255 return;
4257 /* If this is of variable size, we have no choice but to assume it doesn't
4258 overlap since we can't make a temporary for it. */
4259 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4260 return;
/* walk_tree returns non-null only when the callback reported a
   potential overlap with the LHS.  */
4262 /* Otherwise, we must search for overlap ... */
4263 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4264 return;
4266 /* ... and if found, force the value into a temporary. */
4267 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

/* Forward declaration: the range loop body may store a nested
   CONSTRUCTOR, which requires mutual recursion with
   gimplify_init_ctor_eval.  */
static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4345 /* Return true if FDECL is accessing a field that is zero sized. */
4347 static bool
4348 zero_sized_field_decl (const_tree fdecl)
4350 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4351 && integer_zerop (DECL_SIZE (fdecl)))
4352 return true;
4353 return false;
4356 /* Return true if TYPE is zero sized. */
4358 static bool
4359 zero_sized_type (const_tree type)
4361 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4362 && integer_zerop (TYPE_SIZE (type)))
4363 return true;
4364 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Zero stores are redundant when the whole object was cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested aggregate constructors; vector constructors
	 stay as single assignments through gimple.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4456 /* Return the appropriate RHS predicate for this LHS. */
4458 gimple_predicate
4459 rhs_predicate_for (tree lhs)
4461 if (is_gimple_reg (lhs))
4462 return is_gimple_reg_rhs_or_call;
4463 else
4464 return is_gimple_mem_rhs_or_call;
4467 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4468 before the LHS has been gimplified. */
4470 static gimple_predicate
4471 initial_rhs_predicate_for (tree lhs)
4473 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4474 return is_gimple_reg_rhs_or_call;
4475 else
4476 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F is the predicate the caller wants the result to satisfy;
   FALLBACK describes the acceptable result kinds (lvalue/rvalue).
   Returns GS_OK, with *EXPR_P replaced by either the literal's
   initializer or its backing decl.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  The original
   ORIG_CTOR is left untouched; a copy is made lazily on the first
   replaced element.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only substitute the literal's initializer when no one has
	     taken the literal's (or decl's) address and the initializer
	     is itself a CONSTRUCTOR we can recurse into.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: clone the constructor the first time an element
	 actually changes, to avoid mutating shared tree structure.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
4951 /* Given a pointer value OP0, return a simplified version of an
4952 indirection through OP0, or NULL_TREE if no simplification is
4953 possible. This may only be applied to a rhs of an expression.
4954 Note that the resulting type may be different from the type pointed
4955 to in the sense that it is still compatible from the langhooks
4956 point of view. */
4958 static tree
4959 gimple_fold_indirect_ref_rhs (tree t)
4961 return gimple_fold_indirect_ref (t);
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   EXPR_P points at the whole MODIFY/INIT_EXPR; FROM_P and TO_P point at
   its RHS and LHS operands respectively.  Returns a gimplify_status;
   GS_UNHANDLED means no simplification applied.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixpoint: each simplification can expose another.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV.  */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* Fall through to default when no substitution was made.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
5256 /* Return true if T looks like a valid GIMPLE statement. */
5258 static bool
5259 is_gimple_stmt (tree t)
5261 const enum tree_code code = TREE_CODE (t);
5263 switch (code)
5265 case NOP_EXPR:
5266 /* The only valid NOP_EXPR is the empty statement. */
5267 return IS_EMPTY_STMT (t);
5269 case BIND_EXPR:
5270 case COND_EXPR:
5271 /* These are only valid if they're void. */
5272 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5274 case SWITCH_EXPR:
5275 case GOTO_EXPR:
5276 case RETURN_EXPR:
5277 case LABEL_EXPR:
5278 case CASE_LABEL_EXPR:
5279 case TRY_CATCH_EXPR:
5280 case TRY_FINALLY_EXPR:
5281 case EH_FILTER_EXPR:
5282 case CATCH_EXPR:
5283 case ASM_EXPR:
5284 case STATEMENT_LIST:
5285 case OACC_PARALLEL:
5286 case OACC_KERNELS:
5287 case OACC_DATA:
5288 case OACC_HOST_DATA:
5289 case OACC_DECLARE:
5290 case OACC_UPDATE:
5291 case OACC_ENTER_DATA:
5292 case OACC_EXIT_DATA:
5293 case OACC_CACHE:
5294 case OMP_PARALLEL:
5295 case OMP_FOR:
5296 case OMP_SIMD:
5297 case CILK_SIMD:
5298 case OMP_DISTRIBUTE:
5299 case OACC_LOOP:
5300 case OMP_SECTIONS:
5301 case OMP_SECTION:
5302 case OMP_SINGLE:
5303 case OMP_MASTER:
5304 case OMP_TASKGROUP:
5305 case OMP_ORDERED:
5306 case OMP_CRITICAL:
5307 case OMP_TASK:
5308 case OMP_TARGET:
5309 case OMP_TARGET_DATA:
5310 case OMP_TARGET_UPDATE:
5311 case OMP_TARGET_ENTER_DATA:
5312 case OMP_TARGET_EXIT_DATA:
5313 case OMP_TASKLOOP:
5314 case OMP_TEAMS:
5315 /* These are always void. */
5316 return true;
5318 case CALL_EXPR:
5319 case MODIFY_EXPR:
5320 case PREDICT_EXPR:
5321 /* These are valid regardless of their type. */
5322 return true;
5324 default:
5325 return false;
5330 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5331 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5332 DECL_GIMPLE_REG_P set.
5334 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5335 other, unmodified part of the complex object just before the total store.
5336 As a consequence, if the object is still uninitialized, an undefined value
5337 will be loaded into a register, which may result in a spurious exception
5338 if the register is floating-point and the value happens to be a signaling
5339 NaN for example. Then the fully-fledged complex operations lowering pass
5340 followed by a DCE pass are necessary in order to fix things up. */
5342 static enum gimplify_status
5343 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5344 bool want_value)
5346 enum tree_code code, ocode;
5347 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5349 lhs = TREE_OPERAND (*expr_p, 0);
5350 rhs = TREE_OPERAND (*expr_p, 1);
5351 code = TREE_CODE (lhs);
5352 lhs = TREE_OPERAND (lhs, 0);
5354 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5355 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5356 TREE_NO_WARNING (other) = 1;
5357 other = get_formal_tmp_var (other, pre_p);
5359 realpart = code == REALPART_EXPR ? rhs : other;
5360 imagpart = code == REALPART_EXPR ? other : rhs;
5362 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5363 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5364 else
5365 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5367 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5368 *expr_p = (want_value) ? rhs : NULL_TREE;
5370 return GS_ALL_DONE;
5373 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5375 modify_expr
5376 : varname '=' rhs
5377 | '*' ID '=' rhs
5379 PRE_P points to the list where side effects that must happen before
5380 *EXPR_P should be stored.
5382 POST_P points to the list where side effects that must happen after
5383 *EXPR_P should be stored.
5385 WANT_VALUE is nonzero iff we want to use the value of this expression
5386 in another expression. */
5388 static enum gimplify_status
5389 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5390 bool want_value)
/* Operand 0 is the destination and operand 1 the source, for both
   MODIFY_EXPR and INIT_EXPR (asserted just below). */
5392 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5393 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5394 enum gimplify_status ret = GS_UNHANDLED;
5395 gimple *assign;
5396 location_t loc = EXPR_LOCATION (*expr_p);
5397 gimple_stmt_iterator gsi;
5399 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5400 || TREE_CODE (*expr_p) == INIT_EXPR);
5402 /* Trying to simplify a clobber using normal logic doesn't work,
5403 so handle it here. */
5404 if (TREE_CLOBBER_P (*from_p))
5406 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5407 if (ret == GS_ERROR)
5408 return ret;
5409 gcc_assert (!want_value
5410 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5411 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5412 *expr_p = NULL;
5413 return GS_ALL_DONE;
5416 /* Insert pointer conversions required by the middle-end that are not
5417 required by the frontend. This fixes middle-end type checking for
5418 for example gcc.dg/redecl-6.c. */
5419 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5421 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5422 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5423 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5426 /* See if any simplifications can be done based on what the RHS is. */
5427 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5428 want_value);
5429 if (ret != GS_UNHANDLED)
5430 return ret;
5432 /* For zero sized types only gimplify the left hand side and right hand
5433 side as statements and throw away the assignment. Do this after
5434 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5435 types properly. */
5436 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
5438 gimplify_stmt (from_p, pre_p);
5439 gimplify_stmt (to_p, pre_p);
5440 *expr_p = NULL_TREE;
5441 return GS_ALL_DONE;
5444 /* If the value being copied is of variable width, compute the length
5445 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5446 before gimplifying any of the operands so that we can resolve any
5447 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5448 the size of the expression to be copied, not of the destination, so
5449 that is what we must do here. */
5450 maybe_with_size_expr (from_p);
5452 /* As a special case, we have to temporarily allow for assignments
5453 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5454 a toplevel statement, when gimplifying the GENERIC expression
5455 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5456 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5458 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5459 prevent gimplify_expr from trying to create a new temporary for
5460 foo's LHS, we tell it that it should only gimplify until it
5461 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5462 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5463 and all we need to do here is set 'a' to be its LHS. */
5465 /* Gimplify the RHS first for C++17 and bug 71104. */
5466 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5467 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5468 if (ret == GS_ERROR)
5469 return ret;
5471 /* Then gimplify the LHS. */
5472 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5473 twice we have to make sure to gimplify into non-SSA as otherwise
5474 the abnormal edge added later will make those defs not dominate
5475 their uses.
5476 ??? Technically this applies only to the registers used in the
5477 resulting non-register *TO_P. */
5478 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5479 if (saved_into_ssa
5480 && TREE_CODE (*from_p) == CALL_EXPR
5481 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5482 gimplify_ctxp->into_ssa = false;
5483 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5484 gimplify_ctxp->into_ssa = saved_into_ssa;
5485 if (ret == GS_ERROR)
5486 return ret;
5488 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5489 guess for the predicate was wrong. */
5490 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5491 if (final_pred != initial_pred)
5493 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5494 if (ret == GS_ERROR)
5495 return ret;
5498 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5499 size as argument to the call. */
5500 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5502 tree call = TREE_OPERAND (*from_p, 0);
5503 tree vlasize = TREE_OPERAND (*from_p, 1);
5505 if (TREE_CODE (call) == CALL_EXPR
5506 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5508 int nargs = call_expr_nargs (call);
5509 tree type = TREE_TYPE (call);
5510 tree ap = CALL_EXPR_ARG (call, 0);
5511 tree tag = CALL_EXPR_ARG (call, 1);
5512 tree aptag = CALL_EXPR_ARG (call, 2);
/* Rebuild the IFN_VA_ARG call with the WITH_SIZE_EXPR size appended as
   an extra trailing argument. */
5513 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5514 IFN_VA_ARG, type,
5515 nargs + 1, ap, tag,
5516 aptag, vlasize);
5517 TREE_OPERAND (*from_p, 0) = newcall;
5521 /* Now see if the above changed *from_p to something we handle specially. */
5522 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5523 want_value);
5524 if (ret != GS_UNHANDLED)
5525 return ret;
5527 /* If we've got a variable sized assignment between two lvalues (i.e. does
5528 not involve a call), then we can make things a bit more straightforward
5529 by converting the assignment to memcpy or memset. */
5530 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5532 tree from = TREE_OPERAND (*from_p, 0);
5533 tree size = TREE_OPERAND (*from_p, 1);
5535 if (TREE_CODE (from) == CONSTRUCTOR)
5536 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5538 if (is_gimple_addressable (from))
5540 *from_p = from;
5541 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5542 pre_p);
5546 /* Transform partial stores to non-addressable complex variables into
5547 total stores. This allows us to use real instead of virtual operands
5548 for these variables, which improves optimization. */
5549 if ((TREE_CODE (*to_p) == REALPART_EXPR
5550 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5551 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5552 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5554 /* Try to alleviate the effects of the gimplification creating artificial
5555 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5556 make sure not to create DECL_DEBUG_EXPR links across functions. */
5557 if (!gimplify_ctxp->into_ssa
5558 && VAR_P (*from_p)
5559 && DECL_IGNORED_P (*from_p)
5560 && DECL_P (*to_p)
5561 && !DECL_IGNORED_P (*to_p)
5562 && decl_function_context (*to_p) == current_function_decl)
5564 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5565 DECL_NAME (*from_p)
5566 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5567 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5568 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
/* A volatile LHS must not be re-read to yield the expression value, so
   evaluate the RHS into a temporary here and return that below. */
5571 if (want_value && TREE_THIS_VOLATILE (*to_p))
5572 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5574 if (TREE_CODE (*from_p) == CALL_EXPR)
5576 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5577 instead of a GIMPLE_ASSIGN. */
5578 gcall *call_stmt;
5579 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5581 /* Gimplify internal functions created in the FEs. */
5582 int nargs = call_expr_nargs (*from_p), i;
5583 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5584 auto_vec<tree> vargs (nargs);
5586 for (i = 0; i < nargs; i++)
5588 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5589 EXPR_LOCATION (*from_p));
5590 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5592 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5593 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5595 else
5597 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5598 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5599 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5600 tree fndecl = get_callee_fndecl (*from_p);
/* __builtin_expect with its three-argument form is lowered directly to
   the IFN_BUILTIN_EXPECT internal function here. */
5601 if (fndecl
5602 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5603 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5604 && call_expr_nargs (*from_p) == 3)
5605 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5606 CALL_EXPR_ARG (*from_p, 0),
5607 CALL_EXPR_ARG (*from_p, 1),
5608 CALL_EXPR_ARG (*from_p, 2));
5609 else
5611 call_stmt = gimple_build_call_from_tree (*from_p);
5612 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
5615 notice_special_calls (call_stmt);
5616 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5617 gimple_call_set_lhs (call_stmt, *to_p);
5618 else if (TREE_CODE (*to_p) == SSA_NAME)
5619 /* The above is somewhat premature, avoid ICEing later for a
5620 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5621 ??? This doesn't make it a default-def. */
5622 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ()
5623 assign = call_stmt;
5625 else
5627 assign = gimple_build_assign (*to_p, *from_p);
5628 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5629 if (COMPARISON_CLASS_P (*from_p))
5630 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5633 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5635 /* We should have got an SSA name from the start. */
5636 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5637 || ! gimple_in_ssa_p (cfun));
/* Emit the statement, then try to fold it in place. */
5640 gimplify_seq_add_stmt (pre_p, assign);
5641 gsi = gsi_last (*pre_p);
5642 maybe_fold_stmt (&gsi);
5644 if (want_value)
5646 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5647 return GS_OK;
5649 else
5650 *expr_p = NULL;
5652 return GS_ALL_DONE;
5655 /* Gimplify a comparison between two variable-sized objects. Do this
5656 with a call to BUILT_IN_MEMCMP. */
5658 static enum gimplify_status
5659 gimplify_variable_sized_compare (tree *expr_p)
5661 location_t loc = EXPR_LOCATION (*expr_p);
5662 tree op0 = TREE_OPERAND (*expr_p, 0);
5663 tree op1 = TREE_OPERAND (*expr_p, 1);
5664 tree t, arg, dest, src, expr;
5666 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5667 arg = unshare_expr (arg);
5668 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5669 src = build_fold_addr_expr_loc (loc, op1);
5670 dest = build_fold_addr_expr_loc (loc, op0);
5671 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5672 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5674 expr
5675 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5676 SET_EXPR_LOCATION (expr, loc);
5677 *expr_p = expr;
5679 return GS_OK;
5682 /* Gimplify a comparison between two aggregate objects of integral scalar
5683 mode as a comparison between the bitwise equivalent scalar values. */
5685 static enum gimplify_status
5686 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5688 location_t loc = EXPR_LOCATION (*expr_p);
5689 tree op0 = TREE_OPERAND (*expr_p, 0);
5690 tree op1 = TREE_OPERAND (*expr_p, 1);
5692 tree type = TREE_TYPE (op0);
5693 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5695 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5696 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5698 *expr_p
5699 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5701 return GS_OK;
5704 /* Gimplify an expression sequence. This function gimplifies each
5705 expression and rewrites the original expression with the last
5706 expression of the sequence in GIMPLE form.
5708 PRE_P points to the list where the side effects for all the
5709 expressions in the sequence will be emitted.
5711 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5713 static enum gimplify_status
5714 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5716 tree t = *expr_p;
5720 tree *sub_p = &TREE_OPERAND (t, 0);
5722 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5723 gimplify_compound_expr (sub_p, pre_p, false);
5724 else
5725 gimplify_stmt (sub_p, pre_p);
5727 t = TREE_OPERAND (t, 1);
5729 while (TREE_CODE (t) == COMPOUND_EXPR);
5731 *expr_p = t;
5732 if (want_value)
5733 return GS_OK;
5734 else
5736 gimplify_stmt (expr_p, pre_p);
5737 return GS_ALL_DONE;
5741 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5742 gimplify. After gimplification, EXPR_P will point to a new temporary
5743 that holds the original value of the SAVE_EXPR node.
5745 PRE_P points to the list where side effects that must happen before
5746 *EXPR_P should be stored. */
5748 static enum gimplify_status
5749 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5751 enum gimplify_status ret = GS_ALL_DONE;
5752 tree val;
5754 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5755 val = TREE_OPERAND (*expr_p, 0);
5757 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5758 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5760 /* The operand may be a void-valued expression such as SAVE_EXPRs
5761 generated by the Java frontend for class initialization. It is
5762 being executed only for its side-effects. */
5763 if (TREE_TYPE (val) == void_type_node)
5765 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5766 is_gimple_stmt, fb_none);
5767 val = NULL;
5769 else
5770 /* The temporary may not be an SSA name as later abnormal and EH
5771 control flow may invalidate use/def domination. */
5772 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5774 TREE_OPERAND (*expr_p, 0) = val;
5775 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5778 *expr_p = val;
5780 return ret;
5783 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5785 unary_expr
5786 : ...
5787 | '&' varname
5790 PRE_P points to the list where side effects that must happen before
5791 *EXPR_P should be stored.
5793 POST_P points to the list where side effects that must happen after
5794 *EXPR_P should be stored. */
5796 static enum gimplify_status
5797 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5799 tree expr = *expr_p;
5800 tree op0 = TREE_OPERAND (expr, 0);
5801 enum gimplify_status ret;
5802 location_t loc = EXPR_LOCATION (*expr_p);
5804 switch (TREE_CODE (op0))
5806 case INDIRECT_REF:
5807 do_indirect_ref:
5808 /* Check if we are dealing with an expression of the form '&*ptr'.
5809 While the front end folds away '&*ptr' into 'ptr', these
5810 expressions may be generated internally by the compiler (e.g.,
5811 builtins like __builtin_va_end). */
5812 /* Caution: the silent array decomposition semantics we allow for
5813 ADDR_EXPR means we can't always discard the pair. */
5814 /* Gimplification of the ADDR_EXPR operand may drop
5815 cv-qualification conversions, so make sure we add them if
5816 needed. */
5818 tree op00 = TREE_OPERAND (op0, 0);
5819 tree t_expr = TREE_TYPE (expr);
5820 tree t_op00 = TREE_TYPE (op00);
5822 if (!useless_type_conversion_p (t_expr, t_op00))
5823 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5824 *expr_p = op00;
5825 ret = GS_OK;
5827 break;
5829 case VIEW_CONVERT_EXPR:
5830 /* Take the address of our operand and then convert it to the type of
5831 this ADDR_EXPR.
5833 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5834 all clear. The impact of this transformation is even less clear. */
5836 /* If the operand is a useless conversion, look through it. Doing so
5837 guarantees that the ADDR_EXPR and its operand will remain of the
5838 same type. */
5839 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5840 op0 = TREE_OPERAND (op0, 0);
5842 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5843 build_fold_addr_expr_loc (loc,
5844 TREE_OPERAND (op0, 0)));
5845 ret = GS_OK;
5846 break;
/* &MEM[ptr, 0] denotes the same address as &*ptr, so only the
   zero-offset form can reuse the INDIRECT_REF handling; a nonzero
   offset falls through to the generic case below. */
5848 case MEM_REF:
5849 if (integer_zerop (TREE_OPERAND (op0, 1)))
5850 goto do_indirect_ref;
5852 /* fall through */
5854 default:
5855 /* If we see a call to a declared builtin or see its address
5856 being taken (we can unify those cases here) then we can mark
5857 the builtin for implicit generation by GCC. */
5858 if (TREE_CODE (op0) == FUNCTION_DECL
5859 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5860 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5861 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5863 /* We use fb_either here because the C frontend sometimes takes
5864 the address of a call that returns a struct; see
5865 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5866 the implied temporary explicit. */
5868 /* Make the operand addressable. */
5869 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5870 is_gimple_addressable, fb_either);
5871 if (ret == GS_ERROR)
5872 break;
5874 /* Then mark it. Beware that it may not be possible to do so directly
5875 if a temporary has been created by the gimplification. */
5876 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5878 op0 = TREE_OPERAND (expr, 0);
5880 /* For various reasons, the gimplification of the expression
5881 may have made a new INDIRECT_REF. */
5882 if (TREE_CODE (op0) == INDIRECT_REF)
5883 goto do_indirect_ref;
5885 mark_addressable (TREE_OPERAND (expr, 0));
5887 /* The FEs may end up building ADDR_EXPRs early on a decl with
5888 an incomplete type. Re-build ADDR_EXPRs in canonical form
5889 here. */
5890 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5891 *expr_p = build_fold_addr_expr (op0);
5893 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5894 recompute_tree_invariant_for_addr_expr (*expr_p);
5896 /* If we re-built the ADDR_EXPR add a conversion to the original type
5897 if required. */
5898 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5899 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5901 break;
5904 return ret;
5907 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5908 value; output operands should be a gimple lvalue. */
5910 static enum gimplify_status
5911 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5913 tree expr;
5914 int noutputs;
5915 const char **oconstraints;
5916 int i;
5917 tree link;
5918 const char *constraint;
5919 bool allows_mem, allows_reg, is_inout;
5920 enum gimplify_status ret, tret;
5921 gasm *stmt;
5922 vec<tree, va_gc> *inputs;
5923 vec<tree, va_gc> *outputs;
5924 vec<tree, va_gc> *clobbers;
5925 vec<tree, va_gc> *labels;
5926 tree link_next;
5928 expr = *expr_p;
5929 noutputs = list_length (ASM_OUTPUTS (expr));
5930 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5932 inputs = NULL;
5933 outputs = NULL;
5934 clobbers = NULL;
5935 labels = NULL;
5937 ret = GS_ALL_DONE;
5938 link_next = NULL_TREE;
/* First pass: gimplify and collect the output operands.  Their
   constraints are remembered in OCONSTRAINTS for the input pass. */
5939 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5941 bool ok;
5942 size_t constraint_len;
5944 link_next = TREE_CHAIN (link);
5946 oconstraints[i]
5947 = constraint
5948 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5949 constraint_len = strlen (constraint);
5950 if (constraint_len == 0)
5951 continue;
5953 ok = parse_output_constraint (&constraint, i, 0, 0,
5954 &allows_mem, &allows_reg, &is_inout);
5955 if (!ok)
5957 ret = GS_ERROR;
5958 is_inout = false;
5961 if (!allows_reg && allows_mem)
5962 mark_addressable (TREE_VALUE (link));
5964 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5965 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5966 fb_lvalue | fb_mayfail);
5967 if (tret == GS_ERROR)
5969 error ("invalid lvalue in asm output %d", i);
5970 ret = tret;
5973 /* If the constraint does not allow memory make sure we gimplify
5974 it to a register if it is not already but its base is. This
5975 happens for complex and vector components. */
5976 if (!allows_mem)
5978 tree op = TREE_VALUE (link);
5979 if (! is_gimple_val (op)
5980 && is_gimple_reg_type (TREE_TYPE (op))
5981 && is_gimple_reg (get_base_address (op)))
5983 tree tem = create_tmp_reg (TREE_TYPE (op));
5984 tree ass;
5985 if (is_inout)
5987 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
5988 tem, unshare_expr (op));
5989 gimplify_and_add (ass, pre_p);
5991 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
5992 gimplify_and_add (ass, post_p);
5994 TREE_VALUE (link) = tem;
5995 tret = GS_OK;
5999 vec_safe_push (outputs, link);
6000 TREE_CHAIN (link) = NULL_TREE;
6002 if (is_inout)
6004 /* An input/output operand. To give the optimizers more
6005 flexibility, split it into separate input and output
6006 operands. */
6007 tree input;
6008 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6009 char buf[11];
6011 /* Turn the in/out constraint into an output constraint. */
6012 char *p = xstrdup (constraint);
6013 p[0] = '=';
6014 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6016 /* And add a matching input constraint. */
6017 if (allows_reg)
6019 sprintf (buf, "%u", i);
6021 /* If there are multiple alternatives in the constraint,
6022 handle each of them individually. Those that allow register
6023 will be replaced with operand number, the others will stay
6024 unchanged. */
6025 if (strchr (p, ',') != NULL)
6027 size_t len = 0, buflen = strlen (buf);
6028 char *beg, *end, *str, *dst;
/* First walk: compute an upper bound for the rewritten constraint
   string's length. */
6030 for (beg = p + 1;;)
6032 end = strchr (beg, ',');
6033 if (end == NULL)
6034 end = strchr (beg, '\0');
6035 if ((size_t) (end - beg) < buflen)
6036 len += buflen + 1;
6037 else
6038 len += end - beg + 1;
6039 if (*end)
6040 beg = end + 1;
6041 else
6042 break;
/* Second walk: copy each alternative, substituting the matching
   operand number for alternatives that allow a register. */
6045 str = (char *) alloca (len);
6046 for (beg = p + 1, dst = str;;)
6048 const char *tem;
6049 bool mem_p, reg_p, inout_p;
6051 end = strchr (beg, ',');
6052 if (end)
6053 *end = '\0';
6054 beg[-1] = '=';
6055 tem = beg - 1;
6056 parse_output_constraint (&tem, i, 0, 0,
6057 &mem_p, &reg_p, &inout_p);
6058 if (dst != str)
6059 *dst++ = ',';
6060 if (reg_p)
6062 memcpy (dst, buf, buflen);
6063 dst += buflen;
6065 else
6067 if (end)
6068 len = end - beg;
6069 else
6070 len = strlen (beg);
6071 memcpy (dst, beg, len);
6072 dst += len;
6074 if (end)
6075 beg = end + 1;
6076 else
6077 break;
6079 *dst = '\0';
6080 input = build_string (dst - str, str);
6082 else
6083 input = build_string (strlen (buf), buf);
6085 else
6086 input = build_string (constraint_len - 1, constraint + 1);
6088 free (p);
6090 input = build_tree_list (build_tree_list (NULL_TREE, input),
6091 unshare_expr (TREE_VALUE (link)));
6092 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
/* Second pass: the input operands.  Note that I keeps counting up
   from the outputs so operands are numbered consecutively in
   diagnostics. */
6096 link_next = NULL_TREE;
6097 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6099 link_next = TREE_CHAIN (link);
6100 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6101 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6102 oconstraints, &allows_mem, &allows_reg);
6104 /* If we can't make copies, we can only accept memory. */
6105 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6107 if (allows_mem)
6108 allows_reg = 0;
6109 else
6111 error ("impossible constraint in %<asm%>");
6112 error ("non-memory input %d must stay in memory", i);
6113 return GS_ERROR;
6117 /* If the operand is a memory input, it should be an lvalue. */
6118 if (!allows_reg && allows_mem)
6120 tree inputv = TREE_VALUE (link);
6121 STRIP_NOPS (inputv);
6122 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6123 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6124 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6125 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6126 || TREE_CODE (inputv) == MODIFY_EXPR)
6127 TREE_VALUE (link) = error_mark_node;
6128 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6129 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6130 if (tret != GS_ERROR)
6132 /* Unlike output operands, memory inputs are not guaranteed
6133 to be lvalues by the FE, and while the expressions are
6134 marked addressable there, if it is e.g. a statement
6135 expression, temporaries in it might not end up being
6136 addressable. They might be already used in the IL and thus
6137 it is too late to make them addressable now though. */
6138 tree x = TREE_VALUE (link);
6139 while (handled_component_p (x))
6140 x = TREE_OPERAND (x, 0);
6141 if (TREE_CODE (x) == MEM_REF
6142 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6143 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6144 if ((VAR_P (x)
6145 || TREE_CODE (x) == PARM_DECL
6146 || TREE_CODE (x) == RESULT_DECL)
6147 && !TREE_ADDRESSABLE (x)
6148 && is_gimple_reg (x))
6150 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6151 input_location), 0,
6152 "memory input %d is not directly addressable",
6154 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6157 mark_addressable (TREE_VALUE (link));
6158 if (tret == GS_ERROR)
6160 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6161 "memory input %d is not directly addressable", i);
6162 ret = tret;
6165 else
6167 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6168 is_gimple_asm_val, fb_rvalue);
6169 if (tret == GS_ERROR)
6170 ret = tret;
6173 TREE_CHAIN (link) = NULL_TREE;
6174 vec_safe_push (inputs, link);
/* Collect the clobbers. */
6177 link_next = NULL_TREE;
6178 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6180 link_next = TREE_CHAIN (link);
6181 TREE_CHAIN (link) = NULL_TREE;
6182 vec_safe_push (clobbers, link);
/* Collect the label operands. */
6185 link_next = NULL_TREE;
6186 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6188 link_next = TREE_CHAIN (link);
6189 TREE_CHAIN (link) = NULL_TREE;
6190 vec_safe_push (labels, link);
6193 /* Do not add ASMs with errors to the gimple IL stream. */
6194 if (ret != GS_ERROR)
6196 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6197 inputs, outputs, clobbers, labels);
6199 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6200 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6202 gimplify_seq_add_stmt (pre_p, stmt);
6205 return ret;
6208 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6209 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6210 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6211 return to this function.
6213 FIXME should we complexify the prequeue handling instead? Or use flags
6214 for all the cleanups and let the optimizer tighten them up? The current
6215 code seems pretty fragile; it will break on a cleanup within any
6216 non-conditional nesting. But any such nesting would be broken, anyway;
6217 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6218 and continues out of it. We can do that at the RTL level, though, so
6219 having an optimizer to tighten up try/finally regions would be a Good
6220 Thing. */
6222 static enum gimplify_status
6223 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6225 gimple_stmt_iterator iter;
6226 gimple_seq body_sequence = NULL;
6228 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6230 /* We only care about the number of conditions between the innermost
6231 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6232 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6233 int old_conds = gimplify_ctxp->conditions;
6234 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6235 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6236 gimplify_ctxp->conditions = 0;
6237 gimplify_ctxp->conditional_cleanups = NULL;
6238 gimplify_ctxp->in_cleanup_point_expr = true;
6240 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6242 gimplify_ctxp->conditions = old_conds;
6243 gimplify_ctxp->conditional_cleanups = old_cleanups;
6244 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Walk the gimplified body, turning each GIMPLE_WITH_CLEANUP_EXPR into
   either inline cleanup statements (when nothing follows it) or a
   GIMPLE_TRY wrapping the statements that follow it. */
6246 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6248 gimple *wce = gsi_stmt (iter);
6250 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6252 if (gsi_one_before_end_p (iter))
6254 /* Note that gsi_insert_seq_before and gsi_remove do not
6255 scan operands, unlike some other sequence mutators. */
6256 if (!gimple_wce_cleanup_eh_only (wce))
6257 gsi_insert_seq_before_without_update (&iter,
6258 gimple_wce_cleanup (wce),
6259 GSI_SAME_STMT);
6260 gsi_remove (&iter, true);
6261 break;
6263 else
6265 gtry *gtry;
6266 gimple_seq seq;
6267 enum gimple_try_flags kind;
6269 if (gimple_wce_cleanup_eh_only (wce))
6270 kind = GIMPLE_TRY_CATCH;
6271 else
6272 kind = GIMPLE_TRY_FINALLY;
6273 seq = gsi_split_seq_after (iter);
6275 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6276 /* Do not use gsi_replace here, as it may scan operands.
6277 We want to do a simple structural modification only. */
6278 gsi_set_stmt (&iter, gtry);
6279 iter = gsi_start (gtry->eval);
6282 else
6283 gsi_next (&iter);
6286 gimplify_seq_add_seq (pre_p, body_sequence);
6287 if (temp)
6289 *expr_p = temp;
6290 return GS_OK;
6292 else
6294 *expr_p = NULL;
6295 return GS_ALL_DONE;
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally; this is
   only valid for clobbers.

   VAR is the object the cleanup is for (used only to suppress a bogus
   uninitialized-use warning in the conditional case); the marker itself
   is appended either to *PRE_P or to the current context's
   conditional_cleanups sequence.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* Clobbers may run unconditionally, so no flag is needed;
	     just queue the WCE on the conditional-cleanups sequence.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  /* Guard the cleanup with FLAG, which is set to true only after
	     the initialization actually ran (FTRUE goes to *PRE_P, i.e.
	     next to the initialization itself).  */
	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      /* Unconditional context: emit the WCE marker directly; it will be
	 converted to a GIMPLE_TRY by gimplify_cleanup_point_expr.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.

   Gimplifies the initializer into *PRE_P/*POST_P, registers the slot
   variable, pushes any required cleanups (user cleanup, stack-reuse
   clobber, ASan poisoning), and replaces *EXPR_P with the slot.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build an explicit "temp = init" assignment.  The
	     INIT_EXPR node is freed right after gimplification since it
	     is no longer reachable from the tree.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  EH-only cleanups are
	 pushed immediately; ordinary cleanups are deferred (see below) so
	 they run after the clobber/ASan cleanups pushed next.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_constructor (TREE_TYPE (temp),
						NULL);
	      TREE_THIS_VOLATILE (clobber) = true;
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      /* force_uncond: a clobber may always run unconditionally.  */
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope))
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  /* Unpoison at the saved spot before first use, and
		     re-poison when the temporary dies.  */
		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}
      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
6482 /* Gimplification of expression trees. */
6484 /* Gimplify an expression which appears at statement context. The
6485 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6486 NULL, a new sequence is allocated.
6488 Return true if we actually added a statement to the queue. */
6490 bool
6491 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6493 gimple_seq_node last;
6495 last = gimple_seq_last (*seq_p);
6496 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6497 return last != gimple_seq_last (*seq_p);
/* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
   to CTX.  If entries already exist, force them to be some flavor of private.
   If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Walk outward through the enclosing OMP contexts, adjusting or adding
     an entry for DECL in each one.  */
  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  /* Existing entry: demote SHARED to FIRSTPRIVATE (keeping the
	     SEEN bit), or restrict a map to to-only; anything else is
	     already private enough, and inner contexts would be too.  */
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->target_map_scalars_firstprivate)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
/* Similarly for each of the type sizes of TYPE.

   Recursively firstprivatizes every size/bound expression reachable from
   TYPE (array domains, field offsets, min/max values), using the
   privatized_types set in CTX to avoid visiting a type twice.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* add () returns true if TYPE was already in the set.  */
  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      /* Both the element type and the index range may carry sizes.  */
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  /* Let the frontend firstprivatize any language-specific sizes.  */
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
/* Add an entry for DECL in the OMP context CTX with FLAGS.

   Handles the extra bookkeeping needed for variable-sized variables
   (their pointer replacement and type sizes), by-reference privatization,
   and the OpenACC rule that loop reductions get a copy map on the
   enclosing parallel.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The replacement is always "*ptr"; recurse on PTR itself.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.

   Always returns false: threadprivate variables are never remapped.
   A 0-valued entry is inserted into the context after diagnosing so
   each variable is reported at most once per region.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Threadprivate variables may not appear inside target regions at all;
     diagnose every enclosing target once.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
6788 /* Return true if global var DECL is device resident. */
6790 static bool
6791 device_resident_p (tree decl)
6793 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6795 if (!attr)
6796 return false;
6798 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6800 tree c = TREE_VALUE (t);
6801 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6802 return true;
6805 return false;
6808 /* Return true if DECL has an ACC DECLARE attribute. */
6810 static bool
6811 is_oacc_declared (tree decl)
6813 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6814 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6815 return declared != NULL_TREE;
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.

   IN_CODE is passed through to omp_notice_variable when consulting
   outer contexts.  Returns FLAGS augmented with the chosen
   data-sharing class bit.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A frontend-predetermined sharing overrides the region's default
     clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  /* Search outward for a context that determines the sharing:
	     a non-shared entry forces FIRSTPRIVATE, reaching a
	     parallel/teams without one forces SHARED.  */
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No determining outer context: locals and parms become
	 FIRSTPRIVATE, globals SHARED.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.

   Returns FLAGS augmented with the implicit mapping/data-sharing bits
   mandated by the OpenACC defaults for the region kind; diagnoses the
   use under default(none).  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* For by-reference privatization, classify by the referenced type.  */
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    default:
      gcc_unreachable ();

    case ORT_ACC_KERNELS:
      /* Scalars are default 'copy' under kernels, non-scalars are default
	 'present_or_copy'.  */
      flags |= GOVD_MAP;
      if (!AGGREGATE_TYPE_P (type))
	flags |= GOVD_MAP_FORCE;

      rkind = "kernels";
      break;

    case ORT_ACC_PARALLEL:
      {
	if (on_device || AGGREGATE_TYPE_P (type) || declared)
	  /* Aggregates default to 'present_or_copy'.  */
	  flags |= GOVD_MAP;
	else
	  /* Scalars default to 'firstprivate'.  */
	  flags |= GOVD_FIRSTPRIVATE;
	rkind = "parallel";
      }
      break;
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      /* A global whose value expr is based on a threadprivate variable
	 is treated the same as the threadprivate variable itself.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}

      /* Inside an OpenACC 'routine' function, globals must be declared
	 for device use; diagnose otherwise.  */
      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  /* First use of DECL in this target region: work out how it
	     should be implicitly mapped.  NFLAGS accumulates the choice;
	     it still equal to FLAGS below means "no decision yet".  */
	  unsigned nflags = flags;
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  /* An offloadable global with no overriding data-sharing
		     entry in any outer context is "declare target" and
		     needs no mapping at all.  */
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		is_scalar = lang_hooks.decls.omp_scalar_p (decl);
	      if (is_declare_target)
		;
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE)))
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  {
	    tree type = TREE_TYPE (decl);

	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      {
		if ((ctx->region_type & ORT_ACC) != 0)
		  nflags = oacc_default_clause (ctx, decl, flags);
		else
		  nflags |= GOVD_MAP;
	      }
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* Regions with no data-sharing defaults of their own just defer
	 to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real (in-code) use of a previously registered variable: mark
     the size machinery it depends on as seen too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.

   SIMD encodes the looping construct: 0 = not simd, 1 = simd,
   2 = for/distribute simd combined (each enables/forbids different
   clauses on the iteration variable).  Returns true if DECL is (made)
   private in the relevant context.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Force privacy so we only diagnose once.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* An explicit clause on the iteration variable: check it is a
	     kind permitted for this construct.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* Not recorded here: recurse outward through contexts that don't
     impose a sharing of their own.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.

   COPYPRIVATE selects the (more conservative) assumption made for
   by-reference variables when no context records them.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  /* Walk outward until we find a context that records DECL or run out
     of contexts that merely pass the question along.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Target regions without a data-sharing entry for DECL do not
	 answer the question; keep looking outward.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);
  return false;
}
7334 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7336 static tree
7337 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7339 tree t = *tp;
7341 /* If this node has been visited, unmark it and keep looking. */
7342 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7343 return t;
7345 if (IS_TYPE_OR_DECL_P (t))
7346 *walk_subtrees = 0;
7347 return NULL_TREE;
7350 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7351 and previous omp contexts. */
/* CODE identifies the construct the clauses appear on (e.g. OMP_TARGET,
   OMP_SIMD); REGION_TYPE classifies the new gimplification context pushed
   here (gimplify_omp_ctxp points at it on return).  Erroneous or redundant
   clauses are spliced out of *LIST_P as they are discovered.  */
7353 static void
7354 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7355 enum omp_region_type region_type,
7356 enum tree_code code)
7358 struct gimplify_omp_ctx *ctx, *outer_ctx;
7359 tree c;
7360 hash_map<tree, tree> *struct_map_to_clause = NULL;
/* Points at the chain slot of the map clause preceding a
   GOMP_MAP_ALWAYS_POINTER clause, so the pair can be reordered
   together below.  */
7361 tree *prev_list_p = NULL;
7363 ctx = new_omp_context (region_type);
7364 outer_ctx = ctx->outer_context;
7365 if (code == OMP_TARGET)
7367 if (!lang_GNU_Fortran ())
7368 ctx->target_map_pointers_as_0len_arrays = true;
7369 ctx->target_map_scalars_firstprivate = true;
7371 if (!lang_GNU_Fortran ())
7372 switch (code)
7374 case OMP_TARGET:
7375 case OMP_TARGET_DATA:
7376 case OMP_TARGET_ENTER_DATA:
7377 case OMP_TARGET_EXIT_DATA:
7378 case OACC_DECLARE:
7379 case OACC_HOST_DATA:
7380 ctx->target_firstprivatize_array_bases = true;
7381 default:
7382 break;
/* Walk the clause chain; a clause flagged 'remove' below is spliced
   out of the list at the bottom of this loop.  */
7385 while ((c = *list_p) != NULL)
7387 bool remove = false;
7388 bool notice_outer = true;
7389 const char *check_non_private = NULL;
7390 unsigned int flags;
7391 tree decl;
7393 switch (OMP_CLAUSE_CODE (c))
7395 case OMP_CLAUSE_PRIVATE:
7396 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7397 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7399 flags |= GOVD_PRIVATE_OUTER_REF;
7400 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7402 else
7403 notice_outer = false;
7404 goto do_add;
7405 case OMP_CLAUSE_SHARED:
7406 flags = GOVD_SHARED | GOVD_EXPLICIT;
7407 goto do_add;
7408 case OMP_CLAUSE_FIRSTPRIVATE:
7409 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7410 check_non_private = "firstprivate";
7411 goto do_add;
7412 case OMP_CLAUSE_LASTPRIVATE:
/* For combined constructs, lastprivate on the inner loop may need a
   matching shared/lastprivate entry on the enclosing contexts so the
   value is visible after the region.  */
7413 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7414 check_non_private = "lastprivate";
7415 decl = OMP_CLAUSE_DECL (c);
7416 if (error_operand_p (decl))
7417 goto do_add;
7418 else if (outer_ctx
7419 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7420 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7421 && splay_tree_lookup (outer_ctx->variables,
7422 (splay_tree_key) decl) == NULL)
7424 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7425 if (outer_ctx->outer_context)
7426 omp_notice_variable (outer_ctx->outer_context, decl, true);
7428 else if (outer_ctx
7429 && (outer_ctx->region_type & ORT_TASK) != 0
7430 && outer_ctx->combined_loop
7431 && splay_tree_lookup (outer_ctx->variables,
7432 (splay_tree_key) decl) == NULL)
7434 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7435 if (outer_ctx->outer_context)
7436 omp_notice_variable (outer_ctx->outer_context, decl, true);
7438 else if (outer_ctx
7439 && (outer_ctx->region_type == ORT_WORKSHARE
7440 || outer_ctx->region_type == ORT_ACC)
7441 && outer_ctx->combined_loop
7442 && splay_tree_lookup (outer_ctx->variables,
7443 (splay_tree_key) decl) == NULL
7444 && !omp_check_private (outer_ctx, decl, false))
7446 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN)<
7447 if (outer_ctx->outer_context
7448 && (outer_ctx->outer_context->region_type
7449 == ORT_COMBINED_PARALLEL)
7450 && splay_tree_lookup (outer_ctx->outer_context->variables,
7451 (splay_tree_key) decl) == NULL)
7453 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7454 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7455 if (octx->outer_context)
7457 octx = octx->outer_context;
7458 if (octx->region_type == ORT_WORKSHARE
7459 && octx->combined_loop
7460 && splay_tree_lookup (octx->variables,
7461 (splay_tree_key) decl) == NULL
7462 && !omp_check_private (octx, decl, false))
7464 omp_add_variable (octx, decl,
7465 GOVD_LASTPRIVATE | GOVD_SEEN);
7466 octx = octx->outer_context;
7467 if (octx
7468 && octx->region_type == ORT_COMBINED_TEAMS
7469 && (splay_tree_lookup (octx->variables,
7470 (splay_tree_key) decl)
7471 == NULL))
7473 omp_add_variable (octx, decl,
7474 GOVD_SHARED | GOVD_SEEN);
7475 octx = octx->outer_context;
7478 if (octx)
7479 omp_notice_variable (octx, decl, true);
7482 else if (outer_ctx->outer_context)
7483 omp_notice_variable (outer_ctx->outer_context, decl, true);
7485 goto do_add;
7486 case OMP_CLAUSE_REDUCTION:
7487 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7488 /* OpenACC permits reductions on private variables. */
7489 if (!(region_type & ORT_ACC))
7490 check_non_private = "reduction";
7491 decl = OMP_CLAUSE_DECL (c);
/* Array-section reductions come in as a MEM_REF; gimplify the
   section length and base offset and firstprivatize the temporaries
   they use, then reduce DECL to the underlying base.  */
7492 if (TREE_CODE (decl) == MEM_REF)
7494 tree type = TREE_TYPE (decl);
7495 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7496 NULL, is_gimple_val, fb_rvalue, false)
7497 == GS_ERROR)
7499 remove = true;
7500 break;
7502 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7503 if (DECL_P (v))
7505 omp_firstprivatize_variable (ctx, v);
7506 omp_notice_variable (ctx, v, true);
7508 decl = TREE_OPERAND (decl, 0);
7509 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7511 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7512 NULL, is_gimple_val, fb_rvalue, false)
7513 == GS_ERROR)
7515 remove = true;
7516 break;
7518 v = TREE_OPERAND (decl, 1);
7519 if (DECL_P (v))
7521 omp_firstprivatize_variable (ctx, v);
7522 omp_notice_variable (ctx, v, true);
7524 decl = TREE_OPERAND (decl, 0);
7526 if (TREE_CODE (decl) == ADDR_EXPR
7527 || TREE_CODE (decl) == INDIRECT_REF)
7528 decl = TREE_OPERAND (decl, 0);
7530 goto do_add_decl;
7531 case OMP_CLAUSE_LINEAR:
7532 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7533 is_gimple_val, fb_rvalue) == GS_ERROR)
7535 remove = true;
7536 break;
7538 else
7540 if (code == OMP_SIMD
7541 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7543 struct gimplify_omp_ctx *octx = outer_ctx;
7544 if (octx
7545 && octx->region_type == ORT_WORKSHARE
7546 && octx->combined_loop
7547 && !octx->distribute)
7549 if (octx->outer_context
7550 && (octx->outer_context->region_type
7551 == ORT_COMBINED_PARALLEL))
7552 octx = octx->outer_context->outer_context;
7553 else
7554 octx = octx->outer_context;
7556 if (octx
7557 && octx->region_type == ORT_WORKSHARE
7558 && octx->combined_loop
7559 && octx->distribute)
7561 error_at (OMP_CLAUSE_LOCATION (c),
7562 "%<linear%> clause for variable other than "
7563 "loop iterator specified on construct "
7564 "combined with %<distribute%>");
7565 remove = true;
7566 break;
7569 /* For combined #pragma omp parallel for simd, need to put
7570 lastprivate and perhaps firstprivate too on the
7571 parallel. Similarly for #pragma omp for simd. */
7572 struct gimplify_omp_ctx *octx = outer_ctx;
7573 decl = NULL_TREE;
7576 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7577 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7578 break;
7579 decl = OMP_CLAUSE_DECL (c);
7580 if (error_operand_p (decl))
7582 decl = NULL_TREE;
7583 break;
7585 flags = GOVD_SEEN;
7586 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7587 flags |= GOVD_FIRSTPRIVATE;
7588 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7589 flags |= GOVD_LASTPRIVATE;
7590 if (octx
7591 && octx->region_type == ORT_WORKSHARE
7592 && octx->combined_loop)
7594 if (octx->outer_context
7595 && (octx->outer_context->region_type
7596 == ORT_COMBINED_PARALLEL))
7597 octx = octx->outer_context;
7598 else if (omp_check_private (octx, decl, false))
7599 break;
7601 else if (octx
7602 && (octx->region_type & ORT_TASK) != 0
7603 && octx->combined_loop)
7605 else if (octx
7606 && octx->region_type == ORT_COMBINED_PARALLEL
7607 && ctx->region_type == ORT_WORKSHARE
7608 && octx == outer_ctx)
7609 flags = GOVD_SEEN | GOVD_SHARED;
7610 else if (octx
7611 && octx->region_type == ORT_COMBINED_TEAMS)
7612 flags = GOVD_SEEN | GOVD_SHARED;
7613 else if (octx
7614 && octx->region_type == ORT_COMBINED_TARGET)
7616 flags &= ~GOVD_LASTPRIVATE;
7617 if (flags == GOVD_SEEN)
7618 break;
7620 else
7621 break;
7622 splay_tree_node on
7623 = splay_tree_lookup (octx->variables,
7624 (splay_tree_key) decl);
7625 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7627 octx = NULL;
7628 break;
7630 omp_add_variable (octx, decl, flags);
7631 if (octx->outer_context == NULL)
7632 break;
7633 octx = octx->outer_context;
7635 while (1);
7636 if (octx
7637 && decl
7638 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7639 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7640 omp_notice_variable (octx, decl, true);
7642 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7643 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7644 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7646 notice_outer = false;
7647 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7649 goto do_add;
/* Map clauses need substantial massaging: clause sizes are
   gimplified, component accesses are grouped under a GOMP_MAP_STRUCT
   entry kept sorted by offset, and GOMP_MAP_ALWAYS_POINTER entries
   are kept adjacent to the array-section clause they belong to.  */
7651 case OMP_CLAUSE_MAP:
7652 decl = OMP_CLAUSE_DECL (c);
7653 if (error_operand_p (decl))
7654 remove = true;
7655 switch (code)
7657 case OMP_TARGET:
7658 break;
7659 case OACC_DATA:
7660 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7661 break;
7662 /* FALLTHRU */
7663 case OMP_TARGET_DATA:
7664 case OMP_TARGET_ENTER_DATA:
7665 case OMP_TARGET_EXIT_DATA:
7666 case OACC_ENTER_DATA:
7667 case OACC_EXIT_DATA:
7668 case OACC_HOST_DATA:
7669 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7670 || (OMP_CLAUSE_MAP_KIND (c)
7671 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7672 /* For target {,enter ,exit }data only the array slice is
7673 mapped, but not the pointer to it. */
7674 remove = true;
7675 break;
7676 default:
7677 break;
7679 if (remove)
7680 break;
7681 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7683 struct gimplify_omp_ctx *octx;
7684 for (octx = outer_ctx; octx; octx = octx->outer_context)
7686 if (octx->region_type != ORT_ACC_HOST_DATA)
7687 break;
7688 splay_tree_node n2
7689 = splay_tree_lookup (octx->variables,
7690 (splay_tree_key) decl);
7691 if (n2)
7692 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7693 "declared in enclosing %<host_data%> region",
7694 DECL_NAME (decl));
7697 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7698 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7699 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7700 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7701 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7703 remove = true;
7704 break;
7706 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7707 || (OMP_CLAUSE_MAP_KIND (c)
7708 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7709 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7711 OMP_CLAUSE_SIZE (c)
7712 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7713 false);
7714 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7715 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7717 if (!DECL_P (decl))
7719 tree d = decl, *pd;
7720 if (TREE_CODE (d) == ARRAY_REF)
7722 while (TREE_CODE (d) == ARRAY_REF)
7723 d = TREE_OPERAND (d, 0);
7724 if (TREE_CODE (d) == COMPONENT_REF
7725 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7726 decl = d;
7728 pd = &OMP_CLAUSE_DECL (c);
7729 if (d == decl
7730 && TREE_CODE (decl) == INDIRECT_REF
7731 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7732 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7733 == REFERENCE_TYPE))
7735 pd = &TREE_OPERAND (decl, 0);
7736 decl = TREE_OPERAND (decl, 0);
7738 if (TREE_CODE (decl) == COMPONENT_REF)
7740 while (TREE_CODE (decl) == COMPONENT_REF)
7741 decl = TREE_OPERAND (decl, 0);
7742 if (TREE_CODE (decl) == INDIRECT_REF
7743 && DECL_P (TREE_OPERAND (decl, 0))
7744 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7745 == REFERENCE_TYPE))
7746 decl = TREE_OPERAND (decl, 0);
7748 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7749 == GS_ERROR)
7751 remove = true;
7752 break;
7754 if (DECL_P (decl))
7756 if (error_operand_p (decl))
7758 remove = true;
7759 break;
7762 tree stype = TREE_TYPE (decl);
7763 if (TREE_CODE (stype) == REFERENCE_TYPE)
7764 stype = TREE_TYPE (stype);
7765 if (TYPE_SIZE_UNIT (stype) == NULL
7766 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7768 error_at (OMP_CLAUSE_LOCATION (c),
7769 "mapping field %qE of variable length "
7770 "structure", OMP_CLAUSE_DECL (c));
7771 remove = true;
7772 break;
7775 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7777 /* Error recovery. */
7778 if (prev_list_p == NULL)
7780 remove = true;
7781 break;
7783 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7785 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7786 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7788 remove = true;
7789 break;
/* Compute the byte offset of the component access so the clause
   can be inserted into the GOMP_MAP_STRUCT group sorted by offset.  */
7794 tree offset;
7795 HOST_WIDE_INT bitsize, bitpos;
7796 machine_mode mode;
7797 int unsignedp, reversep, volatilep = 0;
7798 tree base = OMP_CLAUSE_DECL (c);
7799 while (TREE_CODE (base) == ARRAY_REF)
7800 base = TREE_OPERAND (base, 0);
7801 if (TREE_CODE (base) == INDIRECT_REF)
7802 base = TREE_OPERAND (base, 0);
7803 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7804 &mode, &unsignedp, &reversep,
7805 &volatilep);
7806 tree orig_base = base;
7807 if ((TREE_CODE (base) == INDIRECT_REF
7808 || (TREE_CODE (base) == MEM_REF
7809 && integer_zerop (TREE_OPERAND (base, 1))))
7810 && DECL_P (TREE_OPERAND (base, 0))
7811 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7812 == REFERENCE_TYPE))
7813 base = TREE_OPERAND (base, 0);
7814 gcc_assert (base == decl
7815 && (offset == NULL_TREE
7816 || TREE_CODE (offset) == INTEGER_CST));
7818 splay_tree_node n
7819 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7820 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7821 == GOMP_MAP_ALWAYS_POINTER);
/* First component access for this base: create the enclosing
   GOMP_MAP_STRUCT clause.  */
7822 if (n == NULL || (n->value & GOVD_MAP) == 0)
7824 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7825 OMP_CLAUSE_MAP);
7826 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7827 if (orig_base != base)
7828 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7829 else
7830 OMP_CLAUSE_DECL (l) = decl;
7831 OMP_CLAUSE_SIZE (l) = size_int (1);
7832 if (struct_map_to_clause == NULL)
7833 struct_map_to_clause = new hash_map<tree, tree>;
7834 struct_map_to_clause->put (decl, l);
7835 if (ptr)
7837 enum gomp_map_kind mkind
7838 = code == OMP_TARGET_EXIT_DATA
7839 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7840 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7841 OMP_CLAUSE_MAP);
7842 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7843 OMP_CLAUSE_DECL (c2)
7844 = unshare_expr (OMP_CLAUSE_DECL (c));
7845 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7846 OMP_CLAUSE_SIZE (c2)
7847 = TYPE_SIZE_UNIT (ptr_type_node);
7848 OMP_CLAUSE_CHAIN (l) = c2;
7849 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7851 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7852 tree c3
7853 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7854 OMP_CLAUSE_MAP);
7855 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7856 OMP_CLAUSE_DECL (c3)
7857 = unshare_expr (OMP_CLAUSE_DECL (c4));
7858 OMP_CLAUSE_SIZE (c3)
7859 = TYPE_SIZE_UNIT (ptr_type_node);
7860 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7861 OMP_CLAUSE_CHAIN (c2) = c3;
7863 *prev_list_p = l;
7864 prev_list_p = NULL;
7866 else
7868 OMP_CLAUSE_CHAIN (l) = c;
7869 *list_p = l;
7870 list_p = &OMP_CLAUSE_CHAIN (l);
7872 if (orig_base != base && code == OMP_TARGET)
7874 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7875 OMP_CLAUSE_MAP);
7876 enum gomp_map_kind mkind
7877 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7878 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7879 OMP_CLAUSE_DECL (c2) = decl;
7880 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7881 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7882 OMP_CLAUSE_CHAIN (l) = c2;
7884 flags = GOVD_MAP | GOVD_EXPLICIT;
7885 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7886 flags |= GOVD_SEEN;
7887 goto do_add_decl;
/* Base already has a GOMP_MAP_STRUCT clause: insert this component
   into the existing group, keeping it sorted and diagnosing
   duplicates.  */
7889 else
7891 tree *osc = struct_map_to_clause->get (decl);
7892 tree *sc = NULL, *scp = NULL;
7893 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7894 n->value |= GOVD_SEEN;
7895 offset_int o1, o2;
7896 if (offset)
7897 o1 = wi::to_offset (offset);
7898 else
7899 o1 = 0;
7900 if (bitpos)
7901 o1 = o1 + bitpos / BITS_PER_UNIT;
7902 sc = &OMP_CLAUSE_CHAIN (*osc);
7903 if (*sc != c
7904 && (OMP_CLAUSE_MAP_KIND (*sc)
7905 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7906 sc = &OMP_CLAUSE_CHAIN (*sc);
7907 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7908 if (ptr && sc == prev_list_p)
7909 break;
7910 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7911 != COMPONENT_REF
7912 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7913 != INDIRECT_REF)
7914 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7915 != ARRAY_REF))
7916 break;
7917 else
7919 tree offset2;
7920 HOST_WIDE_INT bitsize2, bitpos2;
7921 base = OMP_CLAUSE_DECL (*sc);
7922 if (TREE_CODE (base) == ARRAY_REF)
7924 while (TREE_CODE (base) == ARRAY_REF)
7925 base = TREE_OPERAND (base, 0);
7926 if (TREE_CODE (base) != COMPONENT_REF
7927 || (TREE_CODE (TREE_TYPE (base))
7928 != ARRAY_TYPE))
7929 break;
7931 else if (TREE_CODE (base) == INDIRECT_REF
7932 && (TREE_CODE (TREE_OPERAND (base, 0))
7933 == COMPONENT_REF)
7934 && (TREE_CODE (TREE_TYPE
7935 (TREE_OPERAND (base, 0)))
7936 == REFERENCE_TYPE))
7937 base = TREE_OPERAND (base, 0);
7938 base = get_inner_reference (base, &bitsize2,
7939 &bitpos2, &offset2,
7940 &mode, &unsignedp,
7941 &reversep, &volatilep);
7942 if ((TREE_CODE (base) == INDIRECT_REF
7943 || (TREE_CODE (base) == MEM_REF
7944 && integer_zerop (TREE_OPERAND (base,
7945 1))))
7946 && DECL_P (TREE_OPERAND (base, 0))
7947 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7948 0)))
7949 == REFERENCE_TYPE))
7950 base = TREE_OPERAND (base, 0);
7951 if (base != decl)
7952 break;
7953 if (scp)
7954 continue;
7955 gcc_assert (offset == NULL_TREE
7956 || TREE_CODE (offset) == INTEGER_CST);
7957 tree d1 = OMP_CLAUSE_DECL (*sc);
7958 tree d2 = OMP_CLAUSE_DECL (c);
7959 while (TREE_CODE (d1) == ARRAY_REF)
7960 d1 = TREE_OPERAND (d1, 0);
7961 while (TREE_CODE (d2) == ARRAY_REF)
7962 d2 = TREE_OPERAND (d2, 0);
7963 if (TREE_CODE (d1) == INDIRECT_REF)
7964 d1 = TREE_OPERAND (d1, 0);
7965 if (TREE_CODE (d2) == INDIRECT_REF)
7966 d2 = TREE_OPERAND (d2, 0);
7967 while (TREE_CODE (d1) == COMPONENT_REF)
7968 if (TREE_CODE (d2) == COMPONENT_REF
7969 && TREE_OPERAND (d1, 1)
7970 == TREE_OPERAND (d2, 1))
7972 d1 = TREE_OPERAND (d1, 0);
7973 d2 = TREE_OPERAND (d2, 0);
7975 else
7976 break;
7977 if (d1 == d2)
7979 error_at (OMP_CLAUSE_LOCATION (c),
7980 "%qE appears more than once in map "
7981 "clauses", OMP_CLAUSE_DECL (c));
7982 remove = true;
7983 break;
7985 if (offset2)
7986 o2 = wi::to_offset (offset2);
7987 else
7988 o2 = 0;
7989 if (bitpos2)
7990 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7991 if (wi::ltu_p (o1, o2)
7992 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
7994 if (ptr)
7995 scp = sc;
7996 else
7997 break;
8000 if (remove)
8001 break;
8002 OMP_CLAUSE_SIZE (*osc)
8003 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8004 size_one_node);
8005 if (ptr)
8007 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8008 OMP_CLAUSE_MAP);
8009 tree cl = NULL_TREE;
8010 enum gomp_map_kind mkind
8011 = code == OMP_TARGET_EXIT_DATA
8012 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8013 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8014 OMP_CLAUSE_DECL (c2)
8015 = unshare_expr (OMP_CLAUSE_DECL (c));
8016 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8017 OMP_CLAUSE_SIZE (c2)
8018 = TYPE_SIZE_UNIT (ptr_type_node);
8019 cl = scp ? *prev_list_p : c2;
8020 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8022 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8023 tree c3
8024 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8025 OMP_CLAUSE_MAP);
8026 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8027 OMP_CLAUSE_DECL (c3)
8028 = unshare_expr (OMP_CLAUSE_DECL (c4));
8029 OMP_CLAUSE_SIZE (c3)
8030 = TYPE_SIZE_UNIT (ptr_type_node);
8031 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8032 if (!scp)
8033 OMP_CLAUSE_CHAIN (c2) = c3;
8034 else
8035 cl = c3;
8037 if (scp)
8038 *scp = c2;
8039 if (sc == prev_list_p)
8041 *sc = cl;
8042 prev_list_p = NULL;
8044 else
8046 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8047 list_p = prev_list_p;
8048 prev_list_p = NULL;
8049 OMP_CLAUSE_CHAIN (c) = *sc;
8050 *sc = cl;
8051 continue;
8054 else if (*sc != c)
8056 *list_p = OMP_CLAUSE_CHAIN (c);
8057 OMP_CLAUSE_CHAIN (c) = *sc;
8058 *sc = c;
8059 continue;
8063 if (!remove
8064 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8065 && OMP_CLAUSE_CHAIN (c)
8066 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8067 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8068 == GOMP_MAP_ALWAYS_POINTER))
8069 prev_list_p = list_p;
8070 break;
8072 flags = GOVD_MAP | GOVD_EXPLICIT;
8073 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8074 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8075 flags |= GOVD_MAP_ALWAYS_TO;
8076 goto do_add;
8078 case OMP_CLAUSE_DEPEND:
8079 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8081 tree deps = OMP_CLAUSE_DECL (c);
8082 while (deps && TREE_CODE (deps) == TREE_LIST)
8084 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8085 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8086 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8087 pre_p, NULL, is_gimple_val, fb_rvalue);
8088 deps = TREE_CHAIN (deps);
8090 break;
8092 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8093 break;
8094 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8096 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8097 NULL, is_gimple_val, fb_rvalue);
8098 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8100 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8102 remove = true;
8103 break;
8105 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8106 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8107 is_gimple_val, fb_rvalue) == GS_ERROR)
8109 remove = true;
8110 break;
8112 break;
8114 case OMP_CLAUSE_TO:
8115 case OMP_CLAUSE_FROM:
8116 case OMP_CLAUSE__CACHE_:
8117 decl = OMP_CLAUSE_DECL (c);
8118 if (error_operand_p (decl))
8120 remove = true;
8121 break;
8123 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8124 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8125 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8126 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8127 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8129 remove = true;
8130 break;
8132 if (!DECL_P (decl))
8134 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8135 NULL, is_gimple_lvalue, fb_lvalue)
8136 == GS_ERROR)
8138 remove = true;
8139 break;
8141 break;
8143 goto do_notice;
8145 case OMP_CLAUSE_USE_DEVICE_PTR:
8146 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8147 goto do_add;
8148 case OMP_CLAUSE_IS_DEVICE_PTR:
8149 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8150 goto do_add;
/* Common path: record DECL with FLAGS in the new context, then
   gimplify any reduction/lastprivate/linear statements attached
   to the clause inside that context.  */
8152 do_add:
8153 decl = OMP_CLAUSE_DECL (c);
8154 do_add_decl:
8155 if (error_operand_p (decl))
8157 remove = true;
8158 break;
8160 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8162 tree t = omp_member_access_dummy_var (decl);
8163 if (t)
8165 tree v = DECL_VALUE_EXPR (decl);
8166 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8167 if (outer_ctx)
8168 omp_notice_variable (outer_ctx, t, true);
8171 if (code == OACC_DATA
8172 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8173 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8174 flags |= GOVD_MAP_0LEN_ARRAY;
8175 omp_add_variable (ctx, decl, flags);
8176 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8177 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8179 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8180 GOVD_LOCAL | GOVD_SEEN);
8181 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8182 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8183 find_decl_expr,
8184 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8185 NULL) == NULL_TREE)
8186 omp_add_variable (ctx,
8187 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8188 GOVD_LOCAL | GOVD_SEEN)<
8189 gimplify_omp_ctxp = ctx;
8190 push_gimplify_context ();
8192 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8193 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8195 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8196 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8197 pop_gimplify_context
8198 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8199 push_gimplify_context ();
8200 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8201 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8202 pop_gimplify_context
8203 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8204 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8205 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8207 gimplify_omp_ctxp = outer_ctx;
8209 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8210 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8212 gimplify_omp_ctxp = ctx;
8213 push_gimplify_context ();
8214 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8216 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8217 NULL, NULL);
8218 TREE_SIDE_EFFECTS (bind) = 1;
8219 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8220 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8222 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8223 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8224 pop_gimplify_context
8225 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8226 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8228 gimplify_omp_ctxp = outer_ctx;
8230 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8231 && OMP_CLAUSE_LINEAR_STMT (c))
8233 gimplify_omp_ctxp = ctx;
8234 push_gimplify_context ();
8235 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8237 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8238 NULL, NULL);
8239 TREE_SIDE_EFFECTS (bind) = 1;
8240 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8241 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8243 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8244 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8245 pop_gimplify_context
8246 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8247 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8249 gimplify_omp_ctxp = outer_ctx;
8251 if (notice_outer)
8252 goto do_notice;
8253 break;
8255 case OMP_CLAUSE_COPYIN:
8256 case OMP_CLAUSE_COPYPRIVATE:
8257 decl = OMP_CLAUSE_DECL (c);
8258 if (error_operand_p (decl))
8260 remove = true;
8261 break;
8263 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8264 && !remove
8265 && !omp_check_private (ctx, decl, true))
8267 remove = true;
8268 if (is_global_var (decl))
8270 if (DECL_THREAD_LOCAL_P (decl))
8271 remove = false;
8272 else if (DECL_HAS_VALUE_EXPR_P (decl))
8274 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8276 if (value
8277 && DECL_P (value)
8278 && DECL_THREAD_LOCAL_P (value))
8279 remove = false;
8282 if (remove)
8283 error_at (OMP_CLAUSE_LOCATION (c),
8284 "copyprivate variable %qE is not threadprivate"
8285 " or private in outer context", DECL_NAME (decl));
/* Notify outer contexts of the use of DECL and diagnose clauses
   naming variables that are private in the enclosing context.  */
8287 do_notice:
8288 if (outer_ctx)
8289 omp_notice_variable (outer_ctx, decl, true);
8290 if (check_non_private
8291 && region_type == ORT_WORKSHARE
8292 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8293 || decl == OMP_CLAUSE_DECL (c)
8294 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8295 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8296 == ADDR_EXPR
8297 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8298 == POINTER_PLUS_EXPR
8299 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8300 (OMP_CLAUSE_DECL (c), 0), 0))
8301 == ADDR_EXPR)))))
8302 && omp_check_private (ctx, decl, false))
8304 error ("%s variable %qE is private in outer context",
8305 check_non_private, DECL_NAME (decl));
8306 remove = true;
8308 break;
8310 case OMP_CLAUSE_IF:
8311 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8312 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8314 const char *p[2];
8315 for (int i = 0; i < 2; i++)
8316 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8318 case OMP_PARALLEL: p[i] = "parallel"; break;
8319 case OMP_TASK: p[i] = "task"; break;
8320 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8321 case OMP_TARGET_DATA: p[i] = "target data"; break;
8322 case OMP_TARGET: p[i] = "target"; break;
8323 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8324 case OMP_TARGET_ENTER_DATA:
8325 p[i] = "target enter data"; break;
8326 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8327 default: gcc_unreachable ();
8329 error_at (OMP_CLAUSE_LOCATION (c),
8330 "expected %qs %<if%> clause modifier rather than %qs",
8331 p[0], p[1]);
8332 remove = true;
8334 /* Fall through. */
8336 case OMP_CLAUSE_FINAL:
8337 OMP_CLAUSE_OPERAND (c, 0)
8338 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8339 /* Fall through. */
8341 case OMP_CLAUSE_SCHEDULE:
8342 case OMP_CLAUSE_NUM_THREADS:
8343 case OMP_CLAUSE_NUM_TEAMS:
8344 case OMP_CLAUSE_THREAD_LIMIT:
8345 case OMP_CLAUSE_DIST_SCHEDULE:
8346 case OMP_CLAUSE_DEVICE:
8347 case OMP_CLAUSE_PRIORITY:
8348 case OMP_CLAUSE_GRAINSIZE:
8349 case OMP_CLAUSE_NUM_TASKS:
8350 case OMP_CLAUSE_HINT:
8351 case OMP_CLAUSE__CILK_FOR_COUNT_:
8352 case OMP_CLAUSE_ASYNC:
8353 case OMP_CLAUSE_WAIT:
8354 case OMP_CLAUSE_NUM_GANGS:
8355 case OMP_CLAUSE_NUM_WORKERS:
8356 case OMP_CLAUSE_VECTOR_LENGTH:
8357 case OMP_CLAUSE_WORKER:
8358 case OMP_CLAUSE_VECTOR:
8359 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8360 is_gimple_val, fb_rvalue) == GS_ERROR)
8361 remove = true;
8362 break;
8364 case OMP_CLAUSE_GANG:
8365 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8366 is_gimple_val, fb_rvalue) == GS_ERROR)
8367 remove = true;
8368 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8369 is_gimple_val, fb_rvalue) == GS_ERROR)
8370 remove = true;
8371 break;
8373 case OMP_CLAUSE_NOWAIT:
8374 case OMP_CLAUSE_ORDERED:
8375 case OMP_CLAUSE_UNTIED:
8376 case OMP_CLAUSE_COLLAPSE:
8377 case OMP_CLAUSE_TILE:
8378 case OMP_CLAUSE_AUTO:
8379 case OMP_CLAUSE_SEQ:
8380 case OMP_CLAUSE_INDEPENDENT:
8381 case OMP_CLAUSE_MERGEABLE:
8382 case OMP_CLAUSE_PROC_BIND:
8383 case OMP_CLAUSE_SAFELEN:
8384 case OMP_CLAUSE_SIMDLEN:
8385 case OMP_CLAUSE_NOGROUP:
8386 case OMP_CLAUSE_THREADS:
8387 case OMP_CLAUSE_SIMD:
8388 break;
8390 case OMP_CLAUSE_DEFAULTMAP:
8391 ctx->target_map_scalars_firstprivate = false;
8392 break;
8394 case OMP_CLAUSE_ALIGNED:
8395 decl = OMP_CLAUSE_DECL (c);
8396 if (error_operand_p (decl))
8398 remove = true;
8399 break;
8401 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8402 is_gimple_val, fb_rvalue) == GS_ERROR)
8404 remove = true;
8405 break;
8407 if (!is_global_var (decl)
8408 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8409 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8410 break;
8412 case OMP_CLAUSE_DEFAULT:
8413 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8414 break;
8416 default:
8417 gcc_unreachable ();
8420 if (code == OACC_DATA
8421 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8422 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8423 remove = true;
/* Splice the clause out of the list if marked, else advance.  */
8424 if (remove)
8425 *list_p = OMP_CLAUSE_CHAIN (c);
8426 else
8427 list_p = &OMP_CLAUSE_CHAIN (c);
8430 gimplify_omp_ctxp = ctx;
8431 if (struct_map_to_clause)
8432 delete struct_map_to_clause;
8435 /* Return true if DECL is a candidate for shared to firstprivate
8436 optimization. We only consider non-addressable scalars, not
8437 too big, and not references. */
8439 static bool
8440 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8442 if (TREE_ADDRESSABLE (decl))
8443 return false;
8444 tree type = TREE_TYPE (decl);
8445 if (!is_gimple_reg_type (type)
8446 || TREE_CODE (type) == REFERENCE_TYPE
8447 || TREE_ADDRESSABLE (type))
8448 return false;
8449 /* Don't optimize too large decls, as each thread/task will have
8450 its own. */
8451 HOST_WIDE_INT len = int_size_in_bytes (type);
8452 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8453 return false;
8454 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8455 return false;
8456 return true;
8459 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8460 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8461 GOVD_WRITTEN in outer contexts. */
8463 static void
8464 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8466 for (; ctx; ctx = ctx->outer_context)
8468 splay_tree_node n = splay_tree_lookup (ctx->variables,
8469 (splay_tree_key) decl);
8470 if (n == NULL)
8471 continue;
8472 else if (n->value & GOVD_SHARED)
8474 n->value |= GOVD_WRITTEN;
8475 return;
8477 else if (n->value & GOVD_DATA_SHARE_CLASS)
8478 return;
8482 /* Helper callback for walk_gimple_seq to discover possible stores
8483 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8484 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8485 for those. */
8487 static tree
8488 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8490 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8492 *walk_subtrees = 0;
8493 if (!wi->is_lhs)
8494 return NULL_TREE;
8496 tree op = *tp;
8499 if (handled_component_p (op))
8500 op = TREE_OPERAND (op, 0);
8501 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8502 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8503 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8504 else
8505 break;
8507 while (1);
8508 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8509 return NULL_TREE;
8511 omp_mark_stores (gimplify_omp_ctxp, op);
8512 return NULL_TREE;
8515 /* Helper callback for walk_gimple_seq to discover possible stores
8516 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8517 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8518 for those. */
8520 static tree
8521 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8522 bool *handled_ops_p,
8523 struct walk_stmt_info *wi)
8525 gimple *stmt = gsi_stmt (*gsi_p);
8526 switch (gimple_code (stmt))
8528 /* Don't recurse on OpenMP constructs for which
8529 gimplify_adjust_omp_clauses already handled the bodies,
8530 except handle gimple_omp_for_pre_body. */
8531 case GIMPLE_OMP_FOR:
8532 *handled_ops_p = true;
8533 if (gimple_omp_for_pre_body (stmt))
8534 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8535 omp_find_stores_stmt, omp_find_stores_op, wi);
8536 break;
8537 case GIMPLE_OMP_PARALLEL:
8538 case GIMPLE_OMP_TASK:
8539 case GIMPLE_OMP_SECTIONS:
8540 case GIMPLE_OMP_SINGLE:
8541 case GIMPLE_OMP_TARGET:
8542 case GIMPLE_OMP_TEAMS:
8543 case GIMPLE_OMP_CRITICAL:
8544 *handled_ops_p = true;
8545 break;
8546 default:
8547 break;
8549 return NULL_TREE;
/* Closure passed to gimplify_adjust_omp_clauses_1 through
   splay_tree_foreach: the tail of the clause list to append implicit
   clauses to, and the statement sequence for any gimplification the
   callback needs to emit.  */

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Tail of the construct's clause chain.  */
  gimple_seq *pre_p;	/* Sequence for side-effect statements.  */
};
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  Splay-tree
   callback: N maps a decl to its GOVD_* flags; DATA is a
   gimplify_adjust_omp_clauses_data closure.  Builds and prepends the
   implicit clause(s) for the decl onto *list_p.  Always returns 0 so
   the foreach continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses and context-local decls need no implicit clause;
     likewise decls never actually referenced in the region.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;

  /* Decide whether the clause should be a debug-only PRIVATE.  */
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));

  /* Map the GOVD_* classification onto a clause code.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      /* _Atomic types cannot be mapped for OpenMP (but are fine for
	 OpenACC regions).  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an implicit SHARED clause if some
	     enclosing context privatizes/maps it; otherwise drop it.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      /* _Atomic types cannot be firstprivatized onto an OpenMP target.  */
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  /* Lastprivate and written-shared decls count as stores for the
     shared-to-firstprivate optimization in outer contexts.  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Possibly zero-length array section: emit an ALLOC map of the
	 (dereferenced) pointer plus a FIRSTPRIVATE_POINTER companion.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the outer context, then restore.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind = (flags & GOVD_MAP_TO_ONLY
		  ? GOMP_MAP_TO
		  : GOMP_MAP_TOFROM);
      if (flags & GOVD_MAP_FORCE)
	kind |= GOMP_MAP_FLAG_FORCE;
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map through its DECL_VALUE_EXPR
	     (an INDIRECT_REF of a pointer) and add a pointer clause.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map the referenced object and add a
	     FIRSTPRIVATE_REFERENCE clause for the reference itself.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Firstprivate that is also lastprivate gets a companion
	 LASTPRIVATE clause with the firstprivate bit set.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the frontend finish the clause in the outer context, then
     notice any DECL-valued sizes of the newly added MAP clauses.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
/* Post-gimplification pass over the clause list *LIST_P of construct
   CODE: prune clauses for variables never seen in the region, fix up
   clause codes/kinds based on the collected GOVD_* flags, then append
   implicit data-sharing clauses via gimplify_adjust_omp_clauses_1 and
   pop the current gimplify OMP context.  BODY, if non-NULL, is scanned
   for stores to shared-to-firstprivate candidates first.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  if (body)
    {
      /* Only bother scanning for stores when some enclosing context
	 could have a SHARED classification to mark.  */
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  /* _Atomic decls may not be firstprivate on target.  */
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
					(TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      /* Turn the clause into a debug-only PRIVATE if asked.  */
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      /* NOTE(review): `!= 1` below looks odd — GOVD_WRITTEN is a
		 multi-bit mask, so the AND result is never 1 and the test
		 is effectively always true; the preceding branch already
		 filters the unwritten-shared case, so behavior matches
		 `!= 0`, but confirm against upstream intent.  */
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 1)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to present_or_copy",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      /* Non-decl map operands: only component accesses behind a
		 FIRSTPRIVATE_POINTER on target need the SEEN check.  */
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: map via its DECL_VALUE_EXPR pointer
		 and append a pointer clause for the base.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  /* Skip the just-inserted clause in the outer loop.  */
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: transfer through its value expr.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Error if the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			  "reduction on %qE", DECL_NAME (decl));
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
		  /* Mark everything omp_finish_clause appended as being
		     part of the reduction, then re-link the tail.  */
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  /* These need no adjustment here.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
/* Gimplify OACC_CACHE.  Scans and adjusts the _CACHE_ clause list
   (the adjust step also pops the OMP context pushed by the scan) and
   then drops the construct from the tree.  */

static void
gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;

  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
			     OACC_CACHE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
			       OACC_CACHE);

  /* TODO: Do something sensible with this information.  */

  *expr_p = NULL_TREE;
}
9187 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9188 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9189 kind. The entry kind will replace the one in CLAUSE, while the exit
9190 kind will be used in a new omp_clause and returned to the caller. */
9192 static tree
9193 gimplify_oacc_declare_1 (tree clause)
9195 HOST_WIDE_INT kind, new_op;
9196 bool ret = false;
9197 tree c = NULL;
9199 kind = OMP_CLAUSE_MAP_KIND (clause);
9201 switch (kind)
9203 case GOMP_MAP_ALLOC:
9204 case GOMP_MAP_FORCE_ALLOC:
9205 case GOMP_MAP_FORCE_TO:
9206 new_op = GOMP_MAP_DELETE;
9207 ret = true;
9208 break;
9210 case GOMP_MAP_FORCE_FROM:
9211 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9212 new_op = GOMP_MAP_FORCE_FROM;
9213 ret = true;
9214 break;
9216 case GOMP_MAP_FORCE_TOFROM:
9217 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9218 new_op = GOMP_MAP_FORCE_FROM;
9219 ret = true;
9220 break;
9222 case GOMP_MAP_FROM:
9223 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9224 new_op = GOMP_MAP_FROM;
9225 ret = true;
9226 break;
9228 case GOMP_MAP_TOFROM:
9229 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9230 new_op = GOMP_MAP_FROM;
9231 ret = true;
9232 break;
9234 case GOMP_MAP_DEVICE_RESIDENT:
9235 case GOMP_MAP_FORCE_DEVICEPTR:
9236 case GOMP_MAP_FORCE_PRESENT:
9237 case GOMP_MAP_LINK:
9238 case GOMP_MAP_POINTER:
9239 case GOMP_MAP_TO:
9240 break;
9242 default:
9243 gcc_unreachable ();
9244 break;
9247 if (ret)
9249 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9250 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9251 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9254 return c;
/* Gimplify OACC_DECLARE.  Scans/adjusts the clause list, tags affected
   local VAR_DECLs with the "oacc declare target" attribute, records any
   exit-time counterpart clauses in oacc_declare_returns, and replaces
   the construct with a GIMPLE_OMP_TARGET of kind OACC_DECLARE.  */

static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      /* Look through a MEM_REF to the underlying base.  */
      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      /* Mark the variable as declared for OpenACC, once.  */
      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      /* For function-local variables, remember the clause to apply at
	 function exit (see gimplify_oacc_declare_1).  */
      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
9311 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9312 gimplification of the body, as well as scanning the body for used
9313 variables. We need to do this scan now, because variable-sized
9314 decls will be decomposed during gimplification. */
9316 static void
9317 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9319 tree expr = *expr_p;
9320 gimple *g;
9321 gimple_seq body = NULL;
9323 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9324 OMP_PARALLEL_COMBINED (expr)
9325 ? ORT_COMBINED_PARALLEL
9326 : ORT_PARALLEL, OMP_PARALLEL);
9328 push_gimplify_context ();
9330 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9331 if (gimple_code (g) == GIMPLE_BIND)
9332 pop_gimplify_context (g);
9333 else
9334 pop_gimplify_context (NULL);
9336 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9337 OMP_PARALLEL);
9339 g = gimple_build_omp_parallel (body,
9340 OMP_PARALLEL_CLAUSES (expr),
9341 NULL_TREE, NULL_TREE);
9342 if (OMP_PARALLEL_COMBINED (expr))
9343 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9344 gimplify_seq_add_stmt (pre_p, g);
9345 *expr_p = NULL_TREE;
9348 /* Gimplify the contents of an OMP_TASK statement. This involves
9349 gimplification of the body, as well as scanning the body for used
9350 variables. We need to do this scan now, because variable-sized
9351 decls will be decomposed during gimplification. */
9353 static void
9354 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9356 tree expr = *expr_p;
9357 gimple *g;
9358 gimple_seq body = NULL;
9360 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9361 omp_find_clause (OMP_TASK_CLAUSES (expr),
9362 OMP_CLAUSE_UNTIED)
9363 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9365 push_gimplify_context ();
9367 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9368 if (gimple_code (g) == GIMPLE_BIND)
9369 pop_gimplify_context (g);
9370 else
9371 pop_gimplify_context (NULL);
9373 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9374 OMP_TASK);
9376 g = gimple_build_omp_task (body,
9377 OMP_TASK_CLAUSES (expr),
9378 NULL_TREE, NULL_TREE,
9379 NULL_TREE, NULL_TREE, NULL_TREE);
9380 gimplify_seq_add_stmt (pre_p, g);
9381 *expr_p = NULL_TREE;
9384 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9385 with non-NULL OMP_FOR_INIT. */
9387 static tree
9388 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9390 *walk_subtrees = 0;
9391 switch (TREE_CODE (*tp))
9393 case OMP_FOR:
9394 *walk_subtrees = 1;
9395 /* FALLTHRU */
9396 case OMP_SIMD:
9397 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9398 return *tp;
9399 break;
9400 case BIND_EXPR:
9401 case STATEMENT_LIST:
9402 case OMP_PARALLEL:
9403 *walk_subtrees = 1;
9404 break;
9405 default:
9406 break;
9408 return NULL_TREE;
9411 /* Gimplify the gross structure of an OMP_FOR statement. */
9413 static enum gimplify_status
9414 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9416 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9417 enum gimplify_status ret = GS_ALL_DONE;
9418 enum gimplify_status tret;
9419 gomp_for *gfor;
9420 gimple_seq for_body, for_pre_body;
9421 int i;
9422 bitmap has_decl_expr = NULL;
9423 enum omp_region_type ort = ORT_WORKSHARE;
9425 orig_for_stmt = for_stmt = *expr_p;
9427 switch (TREE_CODE (for_stmt))
9429 case OMP_FOR:
9430 case CILK_FOR:
9431 case OMP_DISTRIBUTE:
9432 break;
9433 case OACC_LOOP:
9434 ort = ORT_ACC;
9435 break;
9436 case OMP_TASKLOOP:
9437 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9438 ort = ORT_UNTIED_TASK;
9439 else
9440 ort = ORT_TASK;
9441 break;
9442 case OMP_SIMD:
9443 case CILK_SIMD:
9444 ort = ORT_SIMD;
9445 break;
9446 default:
9447 gcc_unreachable ();
9450 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9451 clause for the IV. */
9452 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9454 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9455 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9456 decl = TREE_OPERAND (t, 0);
9457 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9458 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9459 && OMP_CLAUSE_DECL (c) == decl)
9461 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9462 break;
9466 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9468 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9469 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9470 find_combined_omp_for, NULL, NULL);
9471 if (inner_for_stmt == NULL_TREE)
9473 gcc_assert (seen_error ());
9474 *expr_p = NULL_TREE;
9475 return GS_ERROR;
9479 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9480 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9481 TREE_CODE (for_stmt));
9483 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9484 gimplify_omp_ctxp->distribute = true;
9486 /* Handle OMP_FOR_INIT. */
9487 for_pre_body = NULL;
9488 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9490 has_decl_expr = BITMAP_ALLOC (NULL);
9491 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9492 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9493 == VAR_DECL)
9495 t = OMP_FOR_PRE_BODY (for_stmt);
9496 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9498 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9500 tree_stmt_iterator si;
9501 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9502 tsi_next (&si))
9504 t = tsi_stmt (si);
9505 if (TREE_CODE (t) == DECL_EXPR
9506 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9507 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9511 if (OMP_FOR_PRE_BODY (for_stmt))
9513 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9514 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9515 else
9517 struct gimplify_omp_ctx ctx;
9518 memset (&ctx, 0, sizeof (ctx));
9519 ctx.region_type = ORT_NONE;
9520 gimplify_omp_ctxp = &ctx;
9521 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9522 gimplify_omp_ctxp = NULL;
9525 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9527 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9528 for_stmt = inner_for_stmt;
9530 /* For taskloop, need to gimplify the start, end and step before the
9531 taskloop, outside of the taskloop omp context. */
9532 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9534 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9536 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9537 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9539 TREE_OPERAND (t, 1)
9540 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9541 pre_p, NULL, false);
9542 tree c = build_omp_clause (input_location,
9543 OMP_CLAUSE_FIRSTPRIVATE);
9544 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9545 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9546 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9549 /* Handle OMP_FOR_COND. */
9550 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9551 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9553 TREE_OPERAND (t, 1)
9554 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9555 gimple_seq_empty_p (for_pre_body)
9556 ? pre_p : &for_pre_body, NULL,
9557 false);
9558 tree c = build_omp_clause (input_location,
9559 OMP_CLAUSE_FIRSTPRIVATE);
9560 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9561 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9562 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9565 /* Handle OMP_FOR_INCR. */
9566 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9567 if (TREE_CODE (t) == MODIFY_EXPR)
9569 decl = TREE_OPERAND (t, 0);
9570 t = TREE_OPERAND (t, 1);
9571 tree *tp = &TREE_OPERAND (t, 1);
9572 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9573 tp = &TREE_OPERAND (t, 0);
9575 if (!is_gimple_constant (*tp))
9577 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9578 ? pre_p : &for_pre_body;
9579 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9580 tree c = build_omp_clause (input_location,
9581 OMP_CLAUSE_FIRSTPRIVATE);
9582 OMP_CLAUSE_DECL (c) = *tp;
9583 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9584 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9589 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9590 OMP_TASKLOOP);
9593 if (orig_for_stmt != for_stmt)
9594 gimplify_omp_ctxp->combined_loop = true;
9596 for_body = NULL;
9597 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9598 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9599 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9600 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9602 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9603 bool is_doacross = false;
9604 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9606 is_doacross = true;
9607 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9608 (OMP_FOR_INIT (for_stmt))
9609 * 2);
9611 int collapse = 1, tile = 0;
9612 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9613 if (c)
9614 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9615 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9616 if (c)
9617 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9618 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9620 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9621 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9622 decl = TREE_OPERAND (t, 0);
9623 gcc_assert (DECL_P (decl));
9624 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9625 || POINTER_TYPE_P (TREE_TYPE (decl)));
9626 if (is_doacross)
9628 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9629 gimplify_omp_ctxp->loop_iter_var.quick_push
9630 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9631 else
9632 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9633 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9636 /* Make sure the iteration variable is private. */
9637 tree c = NULL_TREE;
9638 tree c2 = NULL_TREE;
9639 if (orig_for_stmt != for_stmt)
9640 /* Do this only on innermost construct for combined ones. */;
9641 else if (ort == ORT_SIMD)
9643 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9644 (splay_tree_key) decl);
9645 omp_is_private (gimplify_omp_ctxp, decl,
9646 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9647 != 1));
9648 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9649 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9650 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9652 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9653 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9654 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9655 if (has_decl_expr
9656 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9658 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9659 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9661 struct gimplify_omp_ctx *outer
9662 = gimplify_omp_ctxp->outer_context;
9663 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9665 if (outer->region_type == ORT_WORKSHARE
9666 && outer->combined_loop)
9668 n = splay_tree_lookup (outer->variables,
9669 (splay_tree_key)decl);
9670 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9672 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9673 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9675 else
9677 struct gimplify_omp_ctx *octx = outer->outer_context;
9678 if (octx
9679 && octx->region_type == ORT_COMBINED_PARALLEL
9680 && octx->outer_context
9681 && (octx->outer_context->region_type
9682 == ORT_WORKSHARE)
9683 && octx->outer_context->combined_loop)
9685 octx = octx->outer_context;
9686 n = splay_tree_lookup (octx->variables,
9687 (splay_tree_key)decl);
9688 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9690 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9691 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9698 OMP_CLAUSE_DECL (c) = decl;
9699 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9700 OMP_FOR_CLAUSES (for_stmt) = c;
9701 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9702 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9704 if (outer->region_type == ORT_WORKSHARE
9705 && outer->combined_loop)
9707 if (outer->outer_context
9708 && (outer->outer_context->region_type
9709 == ORT_COMBINED_PARALLEL))
9710 outer = outer->outer_context;
9711 else if (omp_check_private (outer, decl, false))
9712 outer = NULL;
9714 else if (((outer->region_type & ORT_TASK) != 0)
9715 && outer->combined_loop
9716 && !omp_check_private (gimplify_omp_ctxp,
9717 decl, false))
9719 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9721 omp_notice_variable (outer, decl, true);
9722 outer = NULL;
9724 if (outer)
9726 n = splay_tree_lookup (outer->variables,
9727 (splay_tree_key)decl);
9728 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9730 omp_add_variable (outer, decl,
9731 GOVD_LASTPRIVATE | GOVD_SEEN);
9732 if (outer->region_type == ORT_COMBINED_PARALLEL
9733 && outer->outer_context
9734 && (outer->outer_context->region_type
9735 == ORT_WORKSHARE)
9736 && outer->outer_context->combined_loop)
9738 outer = outer->outer_context;
9739 n = splay_tree_lookup (outer->variables,
9740 (splay_tree_key)decl);
9741 if (omp_check_private (outer, decl, false))
9742 outer = NULL;
9743 else if (n == NULL
9744 || ((n->value & GOVD_DATA_SHARE_CLASS)
9745 == 0))
9746 omp_add_variable (outer, decl,
9747 GOVD_LASTPRIVATE
9748 | GOVD_SEEN);
9749 else
9750 outer = NULL;
9752 if (outer && outer->outer_context
9753 && (outer->outer_context->region_type
9754 == ORT_COMBINED_TEAMS))
9756 outer = outer->outer_context;
9757 n = splay_tree_lookup (outer->variables,
9758 (splay_tree_key)decl);
9759 if (n == NULL
9760 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9761 omp_add_variable (outer, decl,
9762 GOVD_SHARED | GOVD_SEEN);
9763 else
9764 outer = NULL;
9766 if (outer && outer->outer_context)
9767 omp_notice_variable (outer->outer_context, decl,
9768 true);
9773 else
9775 bool lastprivate
9776 = (!has_decl_expr
9777 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9778 struct gimplify_omp_ctx *outer
9779 = gimplify_omp_ctxp->outer_context;
9780 if (outer && lastprivate)
9782 if (outer->region_type == ORT_WORKSHARE
9783 && outer->combined_loop)
9785 n = splay_tree_lookup (outer->variables,
9786 (splay_tree_key)decl);
9787 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9789 lastprivate = false;
9790 outer = NULL;
9792 else if (outer->outer_context
9793 && (outer->outer_context->region_type
9794 == ORT_COMBINED_PARALLEL))
9795 outer = outer->outer_context;
9796 else if (omp_check_private (outer, decl, false))
9797 outer = NULL;
9799 else if (((outer->region_type & ORT_TASK) != 0)
9800 && outer->combined_loop
9801 && !omp_check_private (gimplify_omp_ctxp,
9802 decl, false))
9804 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9806 omp_notice_variable (outer, decl, true);
9807 outer = NULL;
9809 if (outer)
9811 n = splay_tree_lookup (outer->variables,
9812 (splay_tree_key)decl);
9813 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9815 omp_add_variable (outer, decl,
9816 GOVD_LASTPRIVATE | GOVD_SEEN);
9817 if (outer->region_type == ORT_COMBINED_PARALLEL
9818 && outer->outer_context
9819 && (outer->outer_context->region_type
9820 == ORT_WORKSHARE)
9821 && outer->outer_context->combined_loop)
9823 outer = outer->outer_context;
9824 n = splay_tree_lookup (outer->variables,
9825 (splay_tree_key)decl);
9826 if (omp_check_private (outer, decl, false))
9827 outer = NULL;
9828 else if (n == NULL
9829 || ((n->value & GOVD_DATA_SHARE_CLASS)
9830 == 0))
9831 omp_add_variable (outer, decl,
9832 GOVD_LASTPRIVATE
9833 | GOVD_SEEN);
9834 else
9835 outer = NULL;
9837 if (outer && outer->outer_context
9838 && (outer->outer_context->region_type
9839 == ORT_COMBINED_TEAMS))
9841 outer = outer->outer_context;
9842 n = splay_tree_lookup (outer->variables,
9843 (splay_tree_key)decl);
9844 if (n == NULL
9845 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9846 omp_add_variable (outer, decl,
9847 GOVD_SHARED | GOVD_SEEN);
9848 else
9849 outer = NULL;
9851 if (outer && outer->outer_context)
9852 omp_notice_variable (outer->outer_context, decl,
9853 true);
9858 c = build_omp_clause (input_location,
9859 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9860 : OMP_CLAUSE_PRIVATE);
9861 OMP_CLAUSE_DECL (c) = decl;
9862 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9863 OMP_FOR_CLAUSES (for_stmt) = c;
9864 omp_add_variable (gimplify_omp_ctxp, decl,
9865 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9866 | GOVD_EXPLICIT | GOVD_SEEN);
9867 c = NULL_TREE;
9870 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9871 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9872 else
9873 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9875 /* If DECL is not a gimple register, create a temporary variable to act
9876 as an iteration counter. This is valid, since DECL cannot be
9877 modified in the body of the loop. Similarly for any iteration vars
9878 in simd with collapse > 1 where the iterator vars must be
9879 lastprivate. */
9880 if (orig_for_stmt != for_stmt)
9881 var = decl;
9882 else if (!is_gimple_reg (decl)
9883 || (ort == ORT_SIMD
9884 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9886 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9887 /* Make sure omp_add_variable is not called on it prematurely.
9888 We call it ourselves a few lines later. */
9889 gimplify_omp_ctxp = NULL;
9890 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9891 gimplify_omp_ctxp = ctx;
9892 TREE_OPERAND (t, 0) = var;
9894 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9896 if (ort == ORT_SIMD
9897 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9899 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9900 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9901 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9902 OMP_CLAUSE_DECL (c2) = var;
9903 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9904 OMP_FOR_CLAUSES (for_stmt) = c2;
9905 omp_add_variable (gimplify_omp_ctxp, var,
9906 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9907 if (c == NULL_TREE)
9909 c = c2;
9910 c2 = NULL_TREE;
9913 else
9914 omp_add_variable (gimplify_omp_ctxp, var,
9915 GOVD_PRIVATE | GOVD_SEEN);
9917 else
9918 var = decl;
9920 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9921 is_gimple_val, fb_rvalue, false);
9922 ret = MIN (ret, tret);
9923 if (ret == GS_ERROR)
9924 return ret;
9926 /* Handle OMP_FOR_COND. */
9927 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9928 gcc_assert (COMPARISON_CLASS_P (t));
9929 gcc_assert (TREE_OPERAND (t, 0) == decl);
9931 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9932 is_gimple_val, fb_rvalue, false);
9933 ret = MIN (ret, tret);
9935 /* Handle OMP_FOR_INCR. */
9936 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9937 switch (TREE_CODE (t))
9939 case PREINCREMENT_EXPR:
9940 case POSTINCREMENT_EXPR:
9942 tree decl = TREE_OPERAND (t, 0);
9943 /* c_omp_for_incr_canonicalize_ptr() should have been
9944 called to massage things appropriately. */
9945 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9947 if (orig_for_stmt != for_stmt)
9948 break;
9949 t = build_int_cst (TREE_TYPE (decl), 1);
9950 if (c)
9951 OMP_CLAUSE_LINEAR_STEP (c) = t;
9952 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9953 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9954 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9955 break;
9958 case PREDECREMENT_EXPR:
9959 case POSTDECREMENT_EXPR:
9960 /* c_omp_for_incr_canonicalize_ptr() should have been
9961 called to massage things appropriately. */
9962 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9963 if (orig_for_stmt != for_stmt)
9964 break;
9965 t = build_int_cst (TREE_TYPE (decl), -1);
9966 if (c)
9967 OMP_CLAUSE_LINEAR_STEP (c) = t;
9968 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9969 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9970 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9971 break;
9973 case MODIFY_EXPR:
9974 gcc_assert (TREE_OPERAND (t, 0) == decl);
9975 TREE_OPERAND (t, 0) = var;
9977 t = TREE_OPERAND (t, 1);
9978 switch (TREE_CODE (t))
9980 case PLUS_EXPR:
9981 if (TREE_OPERAND (t, 1) == decl)
9983 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
9984 TREE_OPERAND (t, 0) = var;
9985 break;
9988 /* Fallthru. */
9989 case MINUS_EXPR:
9990 case POINTER_PLUS_EXPR:
9991 gcc_assert (TREE_OPERAND (t, 0) == decl);
9992 TREE_OPERAND (t, 0) = var;
9993 break;
9994 default:
9995 gcc_unreachable ();
9998 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9999 is_gimple_val, fb_rvalue, false);
10000 ret = MIN (ret, tret);
10001 if (c)
10003 tree step = TREE_OPERAND (t, 1);
10004 tree stept = TREE_TYPE (decl);
10005 if (POINTER_TYPE_P (stept))
10006 stept = sizetype;
10007 step = fold_convert (stept, step);
10008 if (TREE_CODE (t) == MINUS_EXPR)
10009 step = fold_build1 (NEGATE_EXPR, stept, step);
10010 OMP_CLAUSE_LINEAR_STEP (c) = step;
10011 if (step != TREE_OPERAND (t, 1))
10013 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10014 &for_pre_body, NULL,
10015 is_gimple_val, fb_rvalue, false);
10016 ret = MIN (ret, tret);
10019 break;
10021 default:
10022 gcc_unreachable ();
10025 if (c2)
10027 gcc_assert (c);
10028 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10031 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10033 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10034 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10035 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10036 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10037 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10038 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10039 && OMP_CLAUSE_DECL (c) == decl)
10041 if (is_doacross && (collapse == 1 || i >= collapse))
10042 t = var;
10043 else
10045 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10046 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10047 gcc_assert (TREE_OPERAND (t, 0) == var);
10048 t = TREE_OPERAND (t, 1);
10049 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10050 || TREE_CODE (t) == MINUS_EXPR
10051 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10052 gcc_assert (TREE_OPERAND (t, 0) == var);
10053 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10054 is_doacross ? var : decl,
10055 TREE_OPERAND (t, 1));
10057 gimple_seq *seq;
10058 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10059 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10060 else
10061 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10062 gimplify_assign (decl, t, seq);
10067 BITMAP_FREE (has_decl_expr);
10069 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10071 push_gimplify_context ();
10072 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10074 OMP_FOR_BODY (orig_for_stmt)
10075 = build3 (BIND_EXPR, void_type_node, NULL,
10076 OMP_FOR_BODY (orig_for_stmt), NULL);
10077 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10081 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10082 &for_body);
10084 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10086 if (gimple_code (g) == GIMPLE_BIND)
10087 pop_gimplify_context (g);
10088 else
10089 pop_gimplify_context (NULL);
10092 if (orig_for_stmt != for_stmt)
10093 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10095 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10096 decl = TREE_OPERAND (t, 0);
10097 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10098 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10099 gimplify_omp_ctxp = ctx->outer_context;
10100 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10101 gimplify_omp_ctxp = ctx;
10102 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10103 TREE_OPERAND (t, 0) = var;
10104 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10105 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10106 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10109 gimplify_adjust_omp_clauses (pre_p, for_body,
10110 &OMP_FOR_CLAUSES (orig_for_stmt),
10111 TREE_CODE (orig_for_stmt));
10113 int kind;
10114 switch (TREE_CODE (orig_for_stmt))
10116 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10117 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10118 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10119 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10120 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10121 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10122 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10123 default:
10124 gcc_unreachable ();
10126 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10127 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10128 for_pre_body);
10129 if (orig_for_stmt != for_stmt)
10130 gimple_omp_for_set_combined_p (gfor, true);
10131 if (gimplify_omp_ctxp
10132 && (gimplify_omp_ctxp->combined_loop
10133 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10134 && gimplify_omp_ctxp->outer_context
10135 && gimplify_omp_ctxp->outer_context->combined_loop)))
10137 gimple_omp_for_set_combined_into_p (gfor, true);
10138 if (gimplify_omp_ctxp->combined_loop)
10139 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10140 else
10141 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10144 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10146 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10147 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10148 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10149 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10150 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10151 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10152 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10153 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10156 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10157 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10158 The outer taskloop stands for computing the number of iterations,
10159 counts for collapsed loops and holding taskloop specific clauses.
10160 The task construct stands for the effect of data sharing on the
10161 explicit task it creates and the inner taskloop stands for expansion
10162 of the static loop inside of the explicit task construct. */
10163 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10165 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10166 tree task_clauses = NULL_TREE;
10167 tree c = *gfor_clauses_ptr;
10168 tree *gtask_clauses_ptr = &task_clauses;
10169 tree outer_for_clauses = NULL_TREE;
10170 tree *gforo_clauses_ptr = &outer_for_clauses;
10171 for (; c; c = OMP_CLAUSE_CHAIN (c))
10172 switch (OMP_CLAUSE_CODE (c))
10174 /* These clauses are allowed on task, move them there. */
10175 case OMP_CLAUSE_SHARED:
10176 case OMP_CLAUSE_FIRSTPRIVATE:
10177 case OMP_CLAUSE_DEFAULT:
10178 case OMP_CLAUSE_IF:
10179 case OMP_CLAUSE_UNTIED:
10180 case OMP_CLAUSE_FINAL:
10181 case OMP_CLAUSE_MERGEABLE:
10182 case OMP_CLAUSE_PRIORITY:
10183 *gtask_clauses_ptr = c;
10184 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10185 break;
10186 case OMP_CLAUSE_PRIVATE:
10187 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10189 /* We want private on outer for and firstprivate
10190 on task. */
10191 *gtask_clauses_ptr
10192 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10193 OMP_CLAUSE_FIRSTPRIVATE);
10194 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10195 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10196 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10197 *gforo_clauses_ptr = c;
10198 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10200 else
10202 *gtask_clauses_ptr = c;
10203 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10205 break;
10206 /* These clauses go into outer taskloop clauses. */
10207 case OMP_CLAUSE_GRAINSIZE:
10208 case OMP_CLAUSE_NUM_TASKS:
10209 case OMP_CLAUSE_NOGROUP:
10210 *gforo_clauses_ptr = c;
10211 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10212 break;
10213 /* Taskloop clause we duplicate on both taskloops. */
10214 case OMP_CLAUSE_COLLAPSE:
10215 *gfor_clauses_ptr = c;
10216 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10217 *gforo_clauses_ptr = copy_node (c);
10218 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10219 break;
10220 /* For lastprivate, keep the clause on inner taskloop, and add
10221 a shared clause on task. If the same decl is also firstprivate,
10222 add also firstprivate clause on the inner taskloop. */
10223 case OMP_CLAUSE_LASTPRIVATE:
10224 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10226 /* For taskloop C++ lastprivate IVs, we want:
10227 1) private on outer taskloop
10228 2) firstprivate and shared on task
10229 3) lastprivate on inner taskloop */
10230 *gtask_clauses_ptr
10231 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10232 OMP_CLAUSE_FIRSTPRIVATE);
10233 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10234 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10235 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10236 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10237 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10238 OMP_CLAUSE_PRIVATE);
10239 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10240 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10241 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10242 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10244 *gfor_clauses_ptr = c;
10245 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10246 *gtask_clauses_ptr
10247 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10248 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10249 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10250 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10251 gtask_clauses_ptr
10252 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10253 break;
10254 default:
10255 gcc_unreachable ();
10257 *gfor_clauses_ptr = NULL_TREE;
10258 *gtask_clauses_ptr = NULL_TREE;
10259 *gforo_clauses_ptr = NULL_TREE;
10260 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10261 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10262 NULL_TREE, NULL_TREE, NULL_TREE);
10263 gimple_omp_task_set_taskloop_p (g, true);
10264 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10265 gomp_for *gforo
10266 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10267 gimple_omp_for_collapse (gfor),
10268 gimple_omp_for_pre_body (gfor));
10269 gimple_omp_for_set_pre_body (gfor, NULL);
10270 gimple_omp_for_set_combined_p (gforo, true);
10271 gimple_omp_for_set_combined_into_p (gfor, true);
10272 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10274 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10275 tree v = create_tmp_var (type);
10276 gimple_omp_for_set_index (gforo, i, v);
10277 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10278 gimple_omp_for_set_initial (gforo, i, t);
10279 gimple_omp_for_set_cond (gforo, i,
10280 gimple_omp_for_cond (gfor, i));
10281 t = unshare_expr (gimple_omp_for_final (gfor, i));
10282 gimple_omp_for_set_final (gforo, i, t);
10283 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10284 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10285 TREE_OPERAND (t, 0) = v;
10286 gimple_omp_for_set_incr (gforo, i, t);
10287 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10288 OMP_CLAUSE_DECL (t) = v;
10289 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10290 gimple_omp_for_set_clauses (gforo, t);
10292 gimplify_seq_add_stmt (pre_p, gforo);
10294 else
10295 gimplify_seq_add_stmt (pre_p, gfor);
10296 if (ret != GS_ALL_DONE)
10297 return GS_ERROR;
10298 *expr_p = NULL_TREE;
10299 return GS_ALL_DONE;
10302 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10303 of OMP_TARGET's body. */
10305 static tree
10306 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10308 *walk_subtrees = 0;
10309 switch (TREE_CODE (*tp))
10311 case OMP_TEAMS:
10312 return *tp;
10313 case BIND_EXPR:
10314 case STATEMENT_LIST:
10315 *walk_subtrees = 1;
10316 break;
10317 default:
10318 break;
10320 return NULL_TREE;
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.
   walk_tree callback: returns a non-NULL tree (the offending node) when
   *TP prevents host-side evaluation, NULL_TREE when *TP is fine.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types themselves are harmless; no need to look inside them.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host value may differ from the value seen
	 inside the target region or whose read has side effects.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* "omp declare target" globals live on the device; their host
	 copy is not guaranteed to be current.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A local variable never seen in a BIND_EXPR is a temporary that
	 may not be live before the construct.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      /* Otherwise consult the data-sharing classification recorded in
	 the current gimplify OMP context.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Unlisted scalars are OK only if they will be implicitly
	     firstprivatized on the target.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	/* map(always, to:) / map(always, tofrom:) copies the host value
	   in, so the host value is authoritative.  */
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only a bare TARGET_EXPR slot (no initializer) can be checked;
	 recurse on the slot decl itself.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
   the target construct.
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 == "not specified"; overwritten below when determinable.  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  /* Fall back to the target directive's location when a clause
     does not provide its own.  */
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  /* Saved so the context can be restored after temporarily gimplifying
     in the outer (host) context below.  */
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of the two results this clause feeds.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	/* Non-NULL return means some subexpression cannot be evaluated
	   safely on the host before the construct: record -1.  */
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the enclosing (host) context, not
	   the target's own context.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* Replace the clause operand with the gimplified value unless the
	   original was already a decl / TARGET_EXPR slot.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Attach the computed results as clauses on the OMP_TARGET itself.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code onto the kind of OMP/OACC region being opened.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
    {
      /* Target regions get their own gimplification context so that
	 temporaries created while gimplifying the body end up inside
	 the region rather than in the enclosing function.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Wrap a data region's body in a TRY_FINALLY whose cleanup
	     calls the matching runtime "end data" builtin, so the
	     region is closed even on abnormal exit.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));

  /* Build the GIMPLE statement corresponding to the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  /* The construct is fully lowered; nothing remains of the tree form.  */
  *expr_p = NULL_TREE;
}
10626 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10627 target update constructs. */
10629 static void
10630 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10632 tree expr = *expr_p;
10633 int kind;
10634 gomp_target *stmt;
10635 enum omp_region_type ort = ORT_WORKSHARE;
10637 switch (TREE_CODE (expr))
10639 case OACC_ENTER_DATA:
10640 case OACC_EXIT_DATA:
10641 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10642 ort = ORT_ACC;
10643 break;
10644 case OACC_UPDATE:
10645 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10646 ort = ORT_ACC;
10647 break;
10648 case OMP_TARGET_UPDATE:
10649 kind = GF_OMP_TARGET_KIND_UPDATE;
10650 break;
10651 case OMP_TARGET_ENTER_DATA:
10652 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10653 break;
10654 case OMP_TARGET_EXIT_DATA:
10655 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10656 break;
10657 default:
10658 gcc_unreachable ();
10660 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10661 ort, TREE_CODE (expr));
10662 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10663 TREE_CODE (expr));
10664 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10666 gimplify_seq_add_stmt (pre_p, stmt);
10667 *expr_p = NULL_TREE;
10670 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10671 stabilized the lhs of the atomic operation as *ADDR. Return true if
10672 EXPR is this stabilized form. */
10674 static bool
10675 goa_lhs_expr_p (tree expr, tree addr)
10677 /* Also include casts to other type variants. The C front end is fond
10678 of adding these for e.g. volatile variables. This is like
10679 STRIP_TYPE_NOPS but includes the main variant lookup. */
10680 STRIP_USELESS_TYPE_CONVERSION (expr);
10682 if (TREE_CODE (expr) == INDIRECT_REF)
10684 expr = TREE_OPERAND (expr, 0);
10685 while (expr != addr
10686 && (CONVERT_EXPR_P (expr)
10687 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10688 && TREE_CODE (expr) == TREE_CODE (addr)
10689 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10691 expr = TREE_OPERAND (expr, 0);
10692 addr = TREE_OPERAND (addr, 0);
10694 if (expr == addr)
10695 return true;
10696 return (TREE_CODE (addr) == ADDR_EXPR
10697 && TREE_CODE (expr) == ADDR_EXPR
10698 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10700 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10701 return true;
10702 return false;
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the lhs itself: substitute the preloaded
     temporary and report the occurrence.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Anything that already is a valid GIMPLE value needs no work.  */
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  /* Recurse into the operands of the expression forms we understand,
     accumulating whether the lhs showed up in any of them.  */
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs did not occur anywhere inside EXPR: gimplify the whole
     expression into a temporary so it is evaluated exactly once.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
/* Gimplify an OMP_ATOMIC statement.  Lowers the tree form into a
   GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair emitted into
   PRE_P.  Returns GS_ALL_DONE on success, GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* OMP_ATOMIC_READ has no rhs; the other forms carry the stored value
     in operand 1.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD is the temporary holding the atomically loaded value;
     goa_stabilize_expr substitutes it for occurrences of the lhs in RHS.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  /* RHS must be gimplified after the load is emitted: its evaluation may
     reference TMP_LOAD.  */
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* An atomic read stores the loaded value back unchanged.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* Capture forms produce a value: the old value comes from the load,
     the new value from the store.  Plain atomics produce nothing.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
10833 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10834 body, and adding some EH bits. */
10836 static enum gimplify_status
10837 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10839 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10840 gimple *body_stmt;
10841 gtransaction *trans_stmt;
10842 gimple_seq body = NULL;
10843 int subcode = 0;
10845 /* Wrap the transaction body in a BIND_EXPR so we have a context
10846 where to put decls for OMP. */
10847 if (TREE_CODE (tbody) != BIND_EXPR)
10849 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10850 TREE_SIDE_EFFECTS (bind) = 1;
10851 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10852 TRANSACTION_EXPR_BODY (expr) = bind;
10855 push_gimplify_context ();
10856 temp = voidify_wrapper_expr (*expr_p, NULL);
10858 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10859 pop_gimplify_context (body_stmt);
10861 trans_stmt = gimple_build_transaction (body);
10862 if (TRANSACTION_EXPR_OUTER (expr))
10863 subcode = GTMA_IS_OUTER;
10864 else if (TRANSACTION_EXPR_RELAXED (expr))
10865 subcode = GTMA_IS_RELAXED;
10866 gimple_transaction_set_subcode (trans_stmt, subcode);
10868 gimplify_seq_add_stmt (pre_p, trans_stmt);
10870 if (temp)
10872 *expr_p = temp;
10873 return GS_OK;
10876 *expr_p = NULL_TREE;
10877 return GS_ALL_DONE;
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  /* Validate any depend(sink:)/depend(source) clauses against the
     iteration variables of the enclosing ordered loop, which are
     recorded pairwise in gimplify_omp_ctxp->loop_iter_var.  */
  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    /* depend clauses require an enclosing loop with ordered(n).  */
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* Each sink decl must name the corresponding loop iteration
	       variable; loop_iter_var holds (orig var, mapped var) pairs,
	       hence the 2 * i indexing.  Matching decls are rewritten to
	       the mapped variable.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend(sink)%> "
			  "clause does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) clause is allowed.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend(source)%> clause on an "
			  "%<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* source and sink clauses are mutually exclusive on one construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }

  /* On any diagnostic, replace the construct with a no-op.  */
  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
10969 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10970 expression produces a value to be used as an operand inside a GIMPLE
10971 statement, the value will be stored back in *EXPR_P. This value will
10972 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10973 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10974 emitted in PRE_P and POST_P.
10976 Additionally, this process may overwrite parts of the input
10977 expression during gimplification. Ideally, it should be
10978 possible to do non-destructive gimplification.
10980 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
10981 the expression needs to evaluate to a value to be used as
10982 an operand in a GIMPLE statement, this value will be stored in
10983 *EXPR_P on exit. This happens when the caller specifies one
10984 of fb_lvalue or fb_rvalue fallback flags.
10986 PRE_P will contain the sequence of GIMPLE statements corresponding
10987 to the evaluation of EXPR and all the side-effects that must
10988 be executed before the main expression. On exit, the last
10989 statement of PRE_P is the core statement being gimplified. For
10990 instance, when gimplifying 'if (++a)' the last statement in
10991 PRE_P will be 'if (t.1)' where t.1 is the result of
10992 pre-incrementing 'a'.
10994 POST_P will contain the sequence of GIMPLE statements corresponding
10995 to the evaluation of all the side-effects that must be executed
10996 after the main expression. If this is NULL, the post
10997 side-effects are stored at the end of PRE_P.
10999 The reason why the output is split in two is to handle post
11000 side-effects explicitly. In some cases, an expression may have
11001 inner and outer post side-effects which need to be emitted in
11002 an order different from the one given by the recursive
11003 traversal. For instance, for the expression (*p--)++ the post
11004 side-effects of '--' must actually occur *after* the post
11005 side-effects of '++'. However, gimplification will first visit
11006 the inner expression, so if a separate POST sequence was not
11007 used, the resulting sequence would be:
11009 1 t.1 = *p
11010 2 p = p - 1
11011 3 t.2 = t.1 + 1
11012 4 *p = t.2
11014 However, the post-decrement operation in line #2 must not be
11015 evaluated until after the store to *p at line #4, so the
11016 correct sequence should be:
11018 1 t.1 = *p
11019 2 t.2 = t.1 + 1
11020 3 *p = t.2
11021 4 p = p - 1
11023 So, by specifying a separate post queue, it is possible
11024 to emit the post side-effects in the correct order.
11025 If POST_P is NULL, an internal queue will be used. Before
11026 returning to the caller, the sequence POST_P is appended to
11027 the main output sequence PRE_P.
11029 GIMPLE_TEST_F points to a function that takes a tree T and
11030 returns nonzero if T is in the GIMPLE form requested by the
11031 caller. The GIMPLE predicates are in gimple.c.
11033 FALLBACK tells the function what sort of a temporary we want if
11034 gimplification cannot produce an expression that complies with
11035 GIMPLE_TEST_F.
11037 fb_none means that no temporary should be generated
11038 fb_rvalue means that an rvalue is OK to generate
11039 fb_lvalue means that an lvalue is OK to generate
11040 fb_either means that either is OK, but an lvalue is preferable.
11041 fb_mayfail means that gimplification may fail (in which case
11042 GS_ERROR will be returned)
11044 The return value is either GS_ERROR or GS_ALL_DONE, since this
11045 function iterates until EXPR is completely gimplified or an error
11046 occurs. */
11048 enum gimplify_status
11049 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11050 bool (*gimple_test_f) (tree), fallback_t fallback)
11052 tree tmp;
11053 gimple_seq internal_pre = NULL;
11054 gimple_seq internal_post = NULL;
11055 tree save_expr;
11056 bool is_statement;
11057 location_t saved_location;
11058 enum gimplify_status ret;
11059 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11060 tree label;
11062 save_expr = *expr_p;
11063 if (save_expr == NULL_TREE)
11064 return GS_ALL_DONE;
11066 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11067 is_statement = gimple_test_f == is_gimple_stmt;
11068 if (is_statement)
11069 gcc_assert (pre_p);
11071 /* Consistency checks. */
11072 if (gimple_test_f == is_gimple_reg)
11073 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11074 else if (gimple_test_f == is_gimple_val
11075 || gimple_test_f == is_gimple_call_addr
11076 || gimple_test_f == is_gimple_condexpr
11077 || gimple_test_f == is_gimple_mem_rhs
11078 || gimple_test_f == is_gimple_mem_rhs_or_call
11079 || gimple_test_f == is_gimple_reg_rhs
11080 || gimple_test_f == is_gimple_reg_rhs_or_call
11081 || gimple_test_f == is_gimple_asm_val
11082 || gimple_test_f == is_gimple_mem_ref_addr)
11083 gcc_assert (fallback & fb_rvalue);
11084 else if (gimple_test_f == is_gimple_min_lval
11085 || gimple_test_f == is_gimple_lvalue)
11086 gcc_assert (fallback & fb_lvalue);
11087 else if (gimple_test_f == is_gimple_addressable)
11088 gcc_assert (fallback & fb_either);
11089 else if (gimple_test_f == is_gimple_stmt)
11090 gcc_assert (fallback == fb_none);
11091 else
11093 /* We should have recognized the GIMPLE_TEST_F predicate to
11094 know what kind of fallback to use in case a temporary is
11095 needed to hold the value or address of *EXPR_P. */
11096 gcc_unreachable ();
11099 /* We used to check the predicate here and return immediately if it
11100 succeeds. This is wrong; the design is for gimplification to be
11101 idempotent, and for the predicates to only test for valid forms, not
11102 whether they are fully simplified. */
11103 if (pre_p == NULL)
11104 pre_p = &internal_pre;
11106 if (post_p == NULL)
11107 post_p = &internal_post;
11109 /* Remember the last statements added to PRE_P and POST_P. Every
11110 new statement added by the gimplification helpers needs to be
11111 annotated with location information. To centralize the
11112 responsibility, we remember the last statement that had been
11113 added to both queues before gimplifying *EXPR_P. If
11114 gimplification produces new statements in PRE_P and POST_P, those
11115 statements will be annotated with the same location information
11116 as *EXPR_P. */
11117 pre_last_gsi = gsi_last (*pre_p);
11118 post_last_gsi = gsi_last (*post_p);
11120 saved_location = input_location;
11121 if (save_expr != error_mark_node
11122 && EXPR_HAS_LOCATION (*expr_p))
11123 input_location = EXPR_LOCATION (*expr_p);
11125 /* Loop over the specific gimplifiers until the toplevel node
11126 remains the same. */
11129 /* Strip away as many useless type conversions as possible
11130 at the toplevel. */
11131 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11133 /* Remember the expr. */
11134 save_expr = *expr_p;
11136 /* Die, die, die, my darling. */
11137 if (save_expr == error_mark_node
11138 || (TREE_TYPE (save_expr)
11139 && TREE_TYPE (save_expr) == error_mark_node))
11141 ret = GS_ERROR;
11142 break;
11145 /* Do any language-specific gimplification. */
11146 ret = ((enum gimplify_status)
11147 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11148 if (ret == GS_OK)
11150 if (*expr_p == NULL_TREE)
11151 break;
11152 if (*expr_p != save_expr)
11153 continue;
11155 else if (ret != GS_UNHANDLED)
11156 break;
11158 /* Make sure that all the cases set 'ret' appropriately. */
11159 ret = GS_UNHANDLED;
11160 switch (TREE_CODE (*expr_p))
11162 /* First deal with the special cases. */
11164 case POSTINCREMENT_EXPR:
11165 case POSTDECREMENT_EXPR:
11166 case PREINCREMENT_EXPR:
11167 case PREDECREMENT_EXPR:
11168 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11169 fallback != fb_none,
11170 TREE_TYPE (*expr_p));
11171 break;
11173 case VIEW_CONVERT_EXPR:
11174 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11175 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11177 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11178 post_p, is_gimple_val, fb_rvalue);
11179 recalculate_side_effects (*expr_p);
11180 break;
11182 /* Fallthru. */
11184 case ARRAY_REF:
11185 case ARRAY_RANGE_REF:
11186 case REALPART_EXPR:
11187 case IMAGPART_EXPR:
11188 case COMPONENT_REF:
11189 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11190 fallback ? fallback : fb_rvalue);
11191 break;
11193 case COND_EXPR:
11194 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11196 /* C99 code may assign to an array in a structure value of a
11197 conditional expression, and this has undefined behavior
11198 only on execution, so create a temporary if an lvalue is
11199 required. */
11200 if (fallback == fb_lvalue)
11202 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11203 mark_addressable (*expr_p);
11204 ret = GS_OK;
11206 break;
11208 case CALL_EXPR:
11209 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11211 /* C99 code may assign to an array in a structure returned
11212 from a function, and this has undefined behavior only on
11213 execution, so create a temporary if an lvalue is
11214 required. */
11215 if (fallback == fb_lvalue)
11217 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11218 mark_addressable (*expr_p);
11219 ret = GS_OK;
11221 break;
11223 case TREE_LIST:
11224 gcc_unreachable ();
11226 case COMPOUND_EXPR:
11227 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11228 break;
11230 case COMPOUND_LITERAL_EXPR:
11231 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11232 gimple_test_f, fallback);
11233 break;
11235 case MODIFY_EXPR:
11236 case INIT_EXPR:
11237 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11238 fallback != fb_none);
11239 break;
11241 case TRUTH_ANDIF_EXPR:
11242 case TRUTH_ORIF_EXPR:
11244 /* Preserve the original type of the expression and the
11245 source location of the outer expression. */
11246 tree org_type = TREE_TYPE (*expr_p);
11247 *expr_p = gimple_boolify (*expr_p);
11248 *expr_p = build3_loc (input_location, COND_EXPR,
11249 org_type, *expr_p,
11250 fold_convert_loc
11251 (input_location,
11252 org_type, boolean_true_node),
11253 fold_convert_loc
11254 (input_location,
11255 org_type, boolean_false_node));
11256 ret = GS_OK;
11257 break;
11260 case TRUTH_NOT_EXPR:
11262 tree type = TREE_TYPE (*expr_p);
11263 /* The parsers are careful to generate TRUTH_NOT_EXPR
11264 only with operands that are always zero or one.
11265 We do not fold here but handle the only interesting case
11266 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11267 *expr_p = gimple_boolify (*expr_p);
11268 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11269 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11270 TREE_TYPE (*expr_p),
11271 TREE_OPERAND (*expr_p, 0));
11272 else
11273 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11274 TREE_TYPE (*expr_p),
11275 TREE_OPERAND (*expr_p, 0),
11276 build_int_cst (TREE_TYPE (*expr_p), 1));
11277 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11278 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11279 ret = GS_OK;
11280 break;
11283 case ADDR_EXPR:
11284 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11285 break;
11287 case ANNOTATE_EXPR:
11289 tree cond = TREE_OPERAND (*expr_p, 0);
11290 tree kind = TREE_OPERAND (*expr_p, 1);
11291 tree type = TREE_TYPE (cond);
11292 if (!INTEGRAL_TYPE_P (type))
11294 *expr_p = cond;
11295 ret = GS_OK;
11296 break;
11298 tree tmp = create_tmp_var (type);
11299 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11300 gcall *call
11301 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11302 gimple_call_set_lhs (call, tmp);
11303 gimplify_seq_add_stmt (pre_p, call);
11304 *expr_p = tmp;
11305 ret = GS_ALL_DONE;
11306 break;
11309 case VA_ARG_EXPR:
11310 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11311 break;
11313 CASE_CONVERT:
11314 if (IS_EMPTY_STMT (*expr_p))
11316 ret = GS_ALL_DONE;
11317 break;
11320 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11321 || fallback == fb_none)
11323 /* Just strip a conversion to void (or in void context) and
11324 try again. */
11325 *expr_p = TREE_OPERAND (*expr_p, 0);
11326 ret = GS_OK;
11327 break;
11330 ret = gimplify_conversion (expr_p);
11331 if (ret == GS_ERROR)
11332 break;
11333 if (*expr_p != save_expr)
11334 break;
11335 /* FALLTHRU */
11337 case FIX_TRUNC_EXPR:
11338 /* unary_expr: ... | '(' cast ')' val | ... */
11339 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11340 is_gimple_val, fb_rvalue);
11341 recalculate_side_effects (*expr_p);
11342 break;
11344 case INDIRECT_REF:
11346 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11347 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11348 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11350 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11351 if (*expr_p != save_expr)
11353 ret = GS_OK;
11354 break;
11357 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11358 is_gimple_reg, fb_rvalue);
11359 if (ret == GS_ERROR)
11360 break;
11362 recalculate_side_effects (*expr_p);
11363 *expr_p = fold_build2_loc (input_location, MEM_REF,
11364 TREE_TYPE (*expr_p),
11365 TREE_OPERAND (*expr_p, 0),
11366 build_int_cst (saved_ptr_type, 0));
11367 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11368 TREE_THIS_NOTRAP (*expr_p) = notrap;
11369 ret = GS_OK;
11370 break;
11373 /* We arrive here through the various re-gimplifcation paths. */
11374 case MEM_REF:
11375 /* First try re-folding the whole thing. */
11376 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11377 TREE_OPERAND (*expr_p, 0),
11378 TREE_OPERAND (*expr_p, 1));
11379 if (tmp)
11381 REF_REVERSE_STORAGE_ORDER (tmp)
11382 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11383 *expr_p = tmp;
11384 recalculate_side_effects (*expr_p);
11385 ret = GS_OK;
11386 break;
11388 /* Avoid re-gimplifying the address operand if it is already
11389 in suitable form. Re-gimplifying would mark the address
11390 operand addressable. Always gimplify when not in SSA form
11391 as we still may have to gimplify decls with value-exprs. */
11392 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11393 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11395 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11396 is_gimple_mem_ref_addr, fb_rvalue);
11397 if (ret == GS_ERROR)
11398 break;
11400 recalculate_side_effects (*expr_p);
11401 ret = GS_ALL_DONE;
11402 break;
11404 /* Constants need not be gimplified. */
11405 case INTEGER_CST:
11406 case REAL_CST:
11407 case FIXED_CST:
11408 case STRING_CST:
11409 case COMPLEX_CST:
11410 case VECTOR_CST:
11411 /* Drop the overflow flag on constants, we do not want
11412 that in the GIMPLE IL. */
11413 if (TREE_OVERFLOW_P (*expr_p))
11414 *expr_p = drop_tree_overflow (*expr_p);
11415 ret = GS_ALL_DONE;
11416 break;
11418 case CONST_DECL:
11419 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11420 CONST_DECL node. Otherwise the decl is replaceable by its
11421 value. */
11422 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11423 if (fallback & fb_lvalue)
11424 ret = GS_ALL_DONE;
11425 else
11427 *expr_p = DECL_INITIAL (*expr_p);
11428 ret = GS_OK;
11430 break;
11432 case DECL_EXPR:
11433 ret = gimplify_decl_expr (expr_p, pre_p);
11434 break;
11436 case BIND_EXPR:
11437 ret = gimplify_bind_expr (expr_p, pre_p);
11438 break;
11440 case LOOP_EXPR:
11441 ret = gimplify_loop_expr (expr_p, pre_p);
11442 break;
11444 case SWITCH_EXPR:
11445 ret = gimplify_switch_expr (expr_p, pre_p);
11446 break;
11448 case EXIT_EXPR:
11449 ret = gimplify_exit_expr (expr_p);
11450 break;
11452 case GOTO_EXPR:
11453 /* If the target is not LABEL, then it is a computed jump
11454 and the target needs to be gimplified. */
11455 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11457 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11458 NULL, is_gimple_val, fb_rvalue);
11459 if (ret == GS_ERROR)
11460 break;
11462 gimplify_seq_add_stmt (pre_p,
11463 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11464 ret = GS_ALL_DONE;
11465 break;
11467 case PREDICT_EXPR:
11468 gimplify_seq_add_stmt (pre_p,
11469 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11470 PREDICT_EXPR_OUTCOME (*expr_p)));
11471 ret = GS_ALL_DONE;
11472 break;
11474 case LABEL_EXPR:
11475 ret = gimplify_label_expr (expr_p, pre_p);
11476 label = LABEL_EXPR_LABEL (*expr_p);
11477 gcc_assert (decl_function_context (label) == current_function_decl);
11479 /* If the label is used in a goto statement, or address of the label
11480 is taken, we need to unpoison all variables that were seen so far.
11481 Doing so would prevent us from reporting a false positives. */
11482 if (asan_poisoned_variables
11483 && asan_used_labels != NULL
11484 && asan_used_labels->contains (label))
11485 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11486 break;
11488 case CASE_LABEL_EXPR:
11489 ret = gimplify_case_label_expr (expr_p, pre_p);
11491 if (gimplify_ctxp->live_switch_vars)
11492 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11493 pre_p);
11494 break;
11496 case RETURN_EXPR:
11497 ret = gimplify_return_expr (*expr_p, pre_p);
11498 break;
11500 case CONSTRUCTOR:
11501 /* Don't reduce this in place; let gimplify_init_constructor work its
11502 magic. Buf if we're just elaborating this for side effects, just
11503 gimplify any element that has side-effects. */
11504 if (fallback == fb_none)
11506 unsigned HOST_WIDE_INT ix;
11507 tree val;
11508 tree temp = NULL_TREE;
11509 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11510 if (TREE_SIDE_EFFECTS (val))
11511 append_to_statement_list (val, &temp);
11513 *expr_p = temp;
11514 ret = temp ? GS_OK : GS_ALL_DONE;
11516 /* C99 code may assign to an array in a constructed
11517 structure or union, and this has undefined behavior only
11518 on execution, so create a temporary if an lvalue is
11519 required. */
11520 else if (fallback == fb_lvalue)
11522 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11523 mark_addressable (*expr_p);
11524 ret = GS_OK;
11526 else
11527 ret = GS_ALL_DONE;
11528 break;
11530 /* The following are special cases that are not handled by the
11531 original GIMPLE grammar. */
11533 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11534 eliminated. */
11535 case SAVE_EXPR:
11536 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11537 break;
11539 case BIT_FIELD_REF:
11540 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11541 post_p, is_gimple_lvalue, fb_either);
11542 recalculate_side_effects (*expr_p);
11543 break;
11545 case TARGET_MEM_REF:
11547 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11549 if (TMR_BASE (*expr_p))
11550 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11551 post_p, is_gimple_mem_ref_addr, fb_either);
11552 if (TMR_INDEX (*expr_p))
11553 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11554 post_p, is_gimple_val, fb_rvalue);
11555 if (TMR_INDEX2 (*expr_p))
11556 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11557 post_p, is_gimple_val, fb_rvalue);
11558 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11559 ret = MIN (r0, r1);
11561 break;
11563 case NON_LVALUE_EXPR:
11564 /* This should have been stripped above. */
11565 gcc_unreachable ();
11567 case ASM_EXPR:
11568 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11569 break;
11571 case TRY_FINALLY_EXPR:
11572 case TRY_CATCH_EXPR:
11574 gimple_seq eval, cleanup;
11575 gtry *try_;
11577 /* Calls to destructors are generated automatically in FINALLY/CATCH
11578 block. They should have location as UNKNOWN_LOCATION. However,
11579 gimplify_call_expr will reset these call stmts to input_location
11580 if it finds stmt's location is unknown. To prevent resetting for
11581 destructors, we set the input_location to unknown.
11582 Note that this only affects the destructor calls in FINALLY/CATCH
11583 block, and will automatically reset to its original value by the
11584 end of gimplify_expr. */
11585 input_location = UNKNOWN_LOCATION;
11586 eval = cleanup = NULL;
11587 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11588 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11589 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11590 if (gimple_seq_empty_p (cleanup))
11592 gimple_seq_add_seq (pre_p, eval);
11593 ret = GS_ALL_DONE;
11594 break;
11596 try_ = gimple_build_try (eval, cleanup,
11597 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11598 ? GIMPLE_TRY_FINALLY
11599 : GIMPLE_TRY_CATCH);
11600 if (EXPR_HAS_LOCATION (save_expr))
11601 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11602 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11603 gimple_set_location (try_, saved_location);
11604 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11605 gimple_try_set_catch_is_cleanup (try_,
11606 TRY_CATCH_IS_CLEANUP (*expr_p));
11607 gimplify_seq_add_stmt (pre_p, try_);
11608 ret = GS_ALL_DONE;
11609 break;
11612 case CLEANUP_POINT_EXPR:
11613 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11614 break;
11616 case TARGET_EXPR:
11617 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11618 break;
11620 case CATCH_EXPR:
11622 gimple *c;
11623 gimple_seq handler = NULL;
11624 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11625 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11626 gimplify_seq_add_stmt (pre_p, c);
11627 ret = GS_ALL_DONE;
11628 break;
11631 case EH_FILTER_EXPR:
11633 gimple *ehf;
11634 gimple_seq failure = NULL;
11636 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11637 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11638 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11639 gimplify_seq_add_stmt (pre_p, ehf);
11640 ret = GS_ALL_DONE;
11641 break;
11644 case OBJ_TYPE_REF:
11646 enum gimplify_status r0, r1;
11647 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11648 post_p, is_gimple_val, fb_rvalue);
11649 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11650 post_p, is_gimple_val, fb_rvalue);
11651 TREE_SIDE_EFFECTS (*expr_p) = 0;
11652 ret = MIN (r0, r1);
11654 break;
11656 case LABEL_DECL:
11657 /* We get here when taking the address of a label. We mark
11658 the label as "forced"; meaning it can never be removed and
11659 it is a potential target for any computed goto. */
11660 FORCED_LABEL (*expr_p) = 1;
11661 ret = GS_ALL_DONE;
11662 break;
11664 case STATEMENT_LIST:
11665 ret = gimplify_statement_list (expr_p, pre_p);
11666 break;
11668 case WITH_SIZE_EXPR:
11670 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11671 post_p == &internal_post ? NULL : post_p,
11672 gimple_test_f, fallback);
11673 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11674 is_gimple_val, fb_rvalue);
11675 ret = GS_ALL_DONE;
11677 break;
11679 case VAR_DECL:
11680 case PARM_DECL:
11681 ret = gimplify_var_or_parm_decl (expr_p);
11682 break;
11684 case RESULT_DECL:
11685 /* When within an OMP context, notice uses of variables. */
11686 if (gimplify_omp_ctxp)
11687 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11688 ret = GS_ALL_DONE;
11689 break;
11691 case SSA_NAME:
11692 /* Allow callbacks into the gimplifier during optimization. */
11693 ret = GS_ALL_DONE;
11694 break;
11696 case OMP_PARALLEL:
11697 gimplify_omp_parallel (expr_p, pre_p);
11698 ret = GS_ALL_DONE;
11699 break;
11701 case OMP_TASK:
11702 gimplify_omp_task (expr_p, pre_p);
11703 ret = GS_ALL_DONE;
11704 break;
11706 case OMP_FOR:
11707 case OMP_SIMD:
11708 case CILK_SIMD:
11709 case CILK_FOR:
11710 case OMP_DISTRIBUTE:
11711 case OMP_TASKLOOP:
11712 case OACC_LOOP:
11713 ret = gimplify_omp_for (expr_p, pre_p);
11714 break;
11716 case OACC_CACHE:
11717 gimplify_oacc_cache (expr_p, pre_p);
11718 ret = GS_ALL_DONE;
11719 break;
11721 case OACC_DECLARE:
11722 gimplify_oacc_declare (expr_p, pre_p);
11723 ret = GS_ALL_DONE;
11724 break;
11726 case OACC_HOST_DATA:
11727 case OACC_DATA:
11728 case OACC_KERNELS:
11729 case OACC_PARALLEL:
11730 case OMP_SECTIONS:
11731 case OMP_SINGLE:
11732 case OMP_TARGET:
11733 case OMP_TARGET_DATA:
11734 case OMP_TEAMS:
11735 gimplify_omp_workshare (expr_p, pre_p);
11736 ret = GS_ALL_DONE;
11737 break;
11739 case OACC_ENTER_DATA:
11740 case OACC_EXIT_DATA:
11741 case OACC_UPDATE:
11742 case OMP_TARGET_UPDATE:
11743 case OMP_TARGET_ENTER_DATA:
11744 case OMP_TARGET_EXIT_DATA:
11745 gimplify_omp_target_update (expr_p, pre_p);
11746 ret = GS_ALL_DONE;
11747 break;
11749 case OMP_SECTION:
11750 case OMP_MASTER:
11751 case OMP_TASKGROUP:
11752 case OMP_ORDERED:
11753 case OMP_CRITICAL:
11755 gimple_seq body = NULL;
11756 gimple *g;
11758 gimplify_and_add (OMP_BODY (*expr_p), &body);
11759 switch (TREE_CODE (*expr_p))
11761 case OMP_SECTION:
11762 g = gimple_build_omp_section (body);
11763 break;
11764 case OMP_MASTER:
11765 g = gimple_build_omp_master (body);
11766 break;
11767 case OMP_TASKGROUP:
11769 gimple_seq cleanup = NULL;
11770 tree fn
11771 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11772 g = gimple_build_call (fn, 0);
11773 gimple_seq_add_stmt (&cleanup, g);
11774 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11775 body = NULL;
11776 gimple_seq_add_stmt (&body, g);
11777 g = gimple_build_omp_taskgroup (body);
11779 break;
11780 case OMP_ORDERED:
11781 g = gimplify_omp_ordered (*expr_p, body);
11782 break;
11783 case OMP_CRITICAL:
11784 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11785 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11786 gimplify_adjust_omp_clauses (pre_p, body,
11787 &OMP_CRITICAL_CLAUSES (*expr_p),
11788 OMP_CRITICAL);
11789 g = gimple_build_omp_critical (body,
11790 OMP_CRITICAL_NAME (*expr_p),
11791 OMP_CRITICAL_CLAUSES (*expr_p));
11792 break;
11793 default:
11794 gcc_unreachable ();
11796 gimplify_seq_add_stmt (pre_p, g);
11797 ret = GS_ALL_DONE;
11798 break;
11801 case OMP_ATOMIC:
11802 case OMP_ATOMIC_READ:
11803 case OMP_ATOMIC_CAPTURE_OLD:
11804 case OMP_ATOMIC_CAPTURE_NEW:
11805 ret = gimplify_omp_atomic (expr_p, pre_p);
11806 break;
11808 case TRANSACTION_EXPR:
11809 ret = gimplify_transaction (expr_p, pre_p);
11810 break;
11812 case TRUTH_AND_EXPR:
11813 case TRUTH_OR_EXPR:
11814 case TRUTH_XOR_EXPR:
11816 tree orig_type = TREE_TYPE (*expr_p);
11817 tree new_type, xop0, xop1;
11818 *expr_p = gimple_boolify (*expr_p);
11819 new_type = TREE_TYPE (*expr_p);
11820 if (!useless_type_conversion_p (orig_type, new_type))
11822 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11823 ret = GS_OK;
11824 break;
11827 /* Boolified binary truth expressions are semantically equivalent
11828 to bitwise binary expressions. Canonicalize them to the
11829 bitwise variant. */
11830 switch (TREE_CODE (*expr_p))
11832 case TRUTH_AND_EXPR:
11833 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11834 break;
11835 case TRUTH_OR_EXPR:
11836 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11837 break;
11838 case TRUTH_XOR_EXPR:
11839 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11840 break;
11841 default:
11842 break;
11844 /* Now make sure that operands have compatible type to
11845 expression's new_type. */
11846 xop0 = TREE_OPERAND (*expr_p, 0);
11847 xop1 = TREE_OPERAND (*expr_p, 1);
11848 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11849 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11850 new_type,
11851 xop0);
11852 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11853 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11854 new_type,
11855 xop1);
11856 /* Continue classified as tcc_binary. */
11857 goto expr_2;
11860 case VEC_COND_EXPR:
11862 enum gimplify_status r0, r1, r2;
11864 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11865 post_p, is_gimple_condexpr, fb_rvalue);
11866 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11867 post_p, is_gimple_val, fb_rvalue);
11868 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11869 post_p, is_gimple_val, fb_rvalue);
11871 ret = MIN (MIN (r0, r1), r2);
11872 recalculate_side_effects (*expr_p);
11874 break;
11876 case FMA_EXPR:
11877 case VEC_PERM_EXPR:
11878 /* Classified as tcc_expression. */
11879 goto expr_3;
11881 case BIT_INSERT_EXPR:
11882 /* Argument 3 is a constant. */
11883 goto expr_2;
11885 case POINTER_PLUS_EXPR:
11887 enum gimplify_status r0, r1;
11888 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11889 post_p, is_gimple_val, fb_rvalue);
11890 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11891 post_p, is_gimple_val, fb_rvalue);
11892 recalculate_side_effects (*expr_p);
11893 ret = MIN (r0, r1);
11894 break;
11897 case CILK_SYNC_STMT:
11899 if (!fn_contains_cilk_spawn_p (cfun))
11901 error_at (EXPR_LOCATION (*expr_p),
11902 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11903 ret = GS_ERROR;
11905 else
11907 gimplify_cilk_sync (expr_p, pre_p);
11908 ret = GS_ALL_DONE;
11910 break;
11913 default:
11914 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11916 case tcc_comparison:
11917 /* Handle comparison of objects of non scalar mode aggregates
11918 with a call to memcmp. It would be nice to only have to do
11919 this for variable-sized objects, but then we'd have to allow
11920 the same nest of reference nodes we allow for MODIFY_EXPR and
11921 that's too complex.
11923 Compare scalar mode aggregates as scalar mode values. Using
11924 memcmp for them would be very inefficient at best, and is
11925 plain wrong if bitfields are involved. */
11927 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11929 /* Vector comparisons need no boolification. */
11930 if (TREE_CODE (type) == VECTOR_TYPE)
11931 goto expr_2;
11932 else if (!AGGREGATE_TYPE_P (type))
11934 tree org_type = TREE_TYPE (*expr_p);
11935 *expr_p = gimple_boolify (*expr_p);
11936 if (!useless_type_conversion_p (org_type,
11937 TREE_TYPE (*expr_p)))
11939 *expr_p = fold_convert_loc (input_location,
11940 org_type, *expr_p);
11941 ret = GS_OK;
11943 else
11944 goto expr_2;
11946 else if (TYPE_MODE (type) != BLKmode)
11947 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11948 else
11949 ret = gimplify_variable_sized_compare (expr_p);
11951 break;
11954 /* If *EXPR_P does not need to be special-cased, handle it
11955 according to its class. */
11956 case tcc_unary:
11957 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11958 post_p, is_gimple_val, fb_rvalue);
11959 break;
11961 case tcc_binary:
11962 expr_2:
11964 enum gimplify_status r0, r1;
11966 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11967 post_p, is_gimple_val, fb_rvalue);
11968 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11969 post_p, is_gimple_val, fb_rvalue);
11971 ret = MIN (r0, r1);
11972 break;
11975 expr_3:
11977 enum gimplify_status r0, r1, r2;
11979 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11980 post_p, is_gimple_val, fb_rvalue);
11981 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11982 post_p, is_gimple_val, fb_rvalue);
11983 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11984 post_p, is_gimple_val, fb_rvalue);
11986 ret = MIN (MIN (r0, r1), r2);
11987 break;
11990 case tcc_declaration:
11991 case tcc_constant:
11992 ret = GS_ALL_DONE;
11993 goto dont_recalculate;
11995 default:
11996 gcc_unreachable ();
11999 recalculate_side_effects (*expr_p);
12001 dont_recalculate:
12002 break;
12005 gcc_assert (*expr_p || ret != GS_OK);
12007 while (ret == GS_OK);
12009 /* If we encountered an error_mark somewhere nested inside, either
12010 stub out the statement or propagate the error back out. */
12011 if (ret == GS_ERROR)
12013 if (is_statement)
12014 *expr_p = NULL;
12015 goto out;
12018 /* This was only valid as a return value from the langhook, which
12019 we handled. Make sure it doesn't escape from any other context. */
12020 gcc_assert (ret != GS_UNHANDLED);
12022 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12024 /* We aren't looking for a value, and we don't have a valid
12025 statement. If it doesn't have side-effects, throw it away.
12026 We can also get here with code such as "*&&L;", where L is
12027 a LABEL_DECL that is marked as FORCED_LABEL. */
12028 if (TREE_CODE (*expr_p) == LABEL_DECL
12029 || !TREE_SIDE_EFFECTS (*expr_p))
12030 *expr_p = NULL;
12031 else if (!TREE_THIS_VOLATILE (*expr_p))
12033 /* This is probably a _REF that contains something nested that
12034 has side effects. Recurse through the operands to find it. */
12035 enum tree_code code = TREE_CODE (*expr_p);
12037 switch (code)
12039 case COMPONENT_REF:
12040 case REALPART_EXPR:
12041 case IMAGPART_EXPR:
12042 case VIEW_CONVERT_EXPR:
12043 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12044 gimple_test_f, fallback);
12045 break;
12047 case ARRAY_REF:
12048 case ARRAY_RANGE_REF:
12049 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12050 gimple_test_f, fallback);
12051 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12052 gimple_test_f, fallback);
12053 break;
12055 default:
12056 /* Anything else with side-effects must be converted to
12057 a valid statement before we get here. */
12058 gcc_unreachable ();
12061 *expr_p = NULL;
12063 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12064 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12066 /* Historically, the compiler has treated a bare reference
12067 to a non-BLKmode volatile lvalue as forcing a load. */
12068 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12070 /* Normally, we do not want to create a temporary for a
12071 TREE_ADDRESSABLE type because such a type should not be
12072 copied by bitwise-assignment. However, we make an
12073 exception here, as all we are doing here is ensuring that
12074 we read the bytes that make up the type. We use
12075 create_tmp_var_raw because create_tmp_var will abort when
12076 given a TREE_ADDRESSABLE type. */
12077 tree tmp = create_tmp_var_raw (type, "vol");
12078 gimple_add_tmp_var (tmp);
12079 gimplify_assign (tmp, *expr_p, pre_p);
12080 *expr_p = NULL;
12082 else
12083 /* We can't do anything useful with a volatile reference to
12084 an incomplete type, so just throw it away. Likewise for
12085 a BLKmode type, since any implicit inner load should
12086 already have been turned into an explicit one by the
12087 gimplification process. */
12088 *expr_p = NULL;
12091 /* If we are gimplifying at the statement level, we're done. Tack
12092 everything together and return. */
12093 if (fallback == fb_none || is_statement)
12095 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12096 it out for GC to reclaim it. */
12097 *expr_p = NULL_TREE;
12099 if (!gimple_seq_empty_p (internal_pre)
12100 || !gimple_seq_empty_p (internal_post))
12102 gimplify_seq_add_seq (&internal_pre, internal_post);
12103 gimplify_seq_add_seq (pre_p, internal_pre);
12106 /* The result of gimplifying *EXPR_P is going to be the last few
12107 statements in *PRE_P and *POST_P. Add location information
12108 to all the statements that were added by the gimplification
12109 helpers. */
12110 if (!gimple_seq_empty_p (*pre_p))
12111 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12113 if (!gimple_seq_empty_p (*post_p))
12114 annotate_all_with_location_after (*post_p, post_last_gsi,
12115 input_location);
12117 goto out;
12120 #ifdef ENABLE_GIMPLE_CHECKING
12121 if (*expr_p)
12123 enum tree_code code = TREE_CODE (*expr_p);
12124 /* These expressions should already be in gimple IR form. */
12125 gcc_assert (code != MODIFY_EXPR
12126 && code != ASM_EXPR
12127 && code != BIND_EXPR
12128 && code != CATCH_EXPR
12129 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12130 && code != EH_FILTER_EXPR
12131 && code != GOTO_EXPR
12132 && code != LABEL_EXPR
12133 && code != LOOP_EXPR
12134 && code != SWITCH_EXPR
12135 && code != TRY_FINALLY_EXPR
12136 && code != OACC_PARALLEL
12137 && code != OACC_KERNELS
12138 && code != OACC_DATA
12139 && code != OACC_HOST_DATA
12140 && code != OACC_DECLARE
12141 && code != OACC_UPDATE
12142 && code != OACC_ENTER_DATA
12143 && code != OACC_EXIT_DATA
12144 && code != OACC_CACHE
12145 && code != OMP_CRITICAL
12146 && code != OMP_FOR
12147 && code != OACC_LOOP
12148 && code != OMP_MASTER
12149 && code != OMP_TASKGROUP
12150 && code != OMP_ORDERED
12151 && code != OMP_PARALLEL
12152 && code != OMP_SECTIONS
12153 && code != OMP_SECTION
12154 && code != OMP_SINGLE);
12156 #endif
12158 /* Otherwise we're gimplifying a subexpression, so the resulting
12159 value is interesting. If it's a valid operand that matches
12160 GIMPLE_TEST_F, we're done. Unless we are handling some
12161 post-effects internally; if that's the case, we need to copy into
12162 a temporary before adding the post-effects to POST_P. */
12163 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12164 goto out;
12166 /* Otherwise, we need to create a new temporary for the gimplified
12167 expression. */
12169 /* We can't return an lvalue if we have an internal postqueue. The
12170 object the lvalue refers to would (probably) be modified by the
12171 postqueue; we need to copy the value out first, which means an
12172 rvalue. */
12173 if ((fallback & fb_lvalue)
12174 && gimple_seq_empty_p (internal_post)
12175 && is_gimple_addressable (*expr_p))
12177 /* An lvalue will do. Take the address of the expression, store it
12178 in a temporary, and replace the expression with an INDIRECT_REF of
12179 that temporary. */
12180 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12181 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12182 *expr_p = build_simple_mem_ref (tmp);
12184 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12186 /* An rvalue will do. Assign the gimplified expression into a
12187 new temporary TMP and replace the original expression with
12188 TMP. First, make sure that the expression has a type so that
12189 it can be assigned into a temporary. */
12190 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12191 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12193 else
12195 #ifdef ENABLE_GIMPLE_CHECKING
12196 if (!(fallback & fb_mayfail))
12198 fprintf (stderr, "gimplification failed:\n");
12199 print_generic_expr (stderr, *expr_p, 0);
12200 debug_tree (*expr_p);
12201 internal_error ("gimplification failed");
12203 #endif
12204 gcc_assert (fallback & fb_mayfail);
12206 /* If this is an asm statement, and the user asked for the
12207 impossible, don't die. Fail and let gimplify_asm_expr
12208 issue an error. */
12209 ret = GS_ERROR;
12210 goto out;
12213 /* Make sure the temporary matches our predicate. */
12214 gcc_assert ((*gimple_test_f) (*expr_p));
12216 if (!gimple_seq_empty_p (internal_post))
12218 annotate_all_with_location (internal_post, input_location);
12219 gimplify_seq_add_seq (pre_p, internal_post);
12222 out:
12223 input_location = saved_location;
12224 return ret;
12227 /* Like gimplify_expr but make sure the gimplified result is not itself
12228 a SSA name (but a decl if it were). Temporaries required by
12229 evaluating *EXPR_P may be still SSA names. */
12231 static enum gimplify_status
12232 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12233 bool (*gimple_test_f) (tree), fallback_t fallback,
12234 bool allow_ssa)
12236 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12237 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12238 gimple_test_f, fallback);
12239 if (! allow_ssa
12240 && TREE_CODE (*expr_p) == SSA_NAME)
12242 tree name = *expr_p;
12243 if (was_ssa_name_p)
12244 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12245 else
12247 /* Avoid the extra copy if possible. */
12248 *expr_p = create_tmp_reg (TREE_TYPE (name));
12249 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12250 release_ssa_name (name);
12253 return ret;
12256 /* Look through TYPE for variable-sized objects and gimplify each such
12257 size that we find. Add to LIST_P any statements generated. */
12259 void
12260 gimplify_type_sizes (tree type, gimple_seq *list_p)
12262 tree field, t;
12264 if (type == NULL || type == error_mark_node)
12265 return;
12267 /* We first do the main variant, then copy into any other variants. */
12268 type = TYPE_MAIN_VARIANT (type);
12270 /* Avoid infinite recursion. */
12271 if (TYPE_SIZES_GIMPLIFIED (type))
12272 return;
12274 TYPE_SIZES_GIMPLIFIED (type) = 1;
12276 switch (TREE_CODE (type))
12278 case INTEGER_TYPE:
12279 case ENUMERAL_TYPE:
12280 case BOOLEAN_TYPE:
12281 case REAL_TYPE:
12282 case FIXED_POINT_TYPE:
12283 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12284 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12286 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12288 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12289 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12291 break;
12293 case ARRAY_TYPE:
12294 /* These types may not have declarations, so handle them here. */
12295 gimplify_type_sizes (TREE_TYPE (type), list_p);
12296 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12297 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12298 with assigned stack slots, for -O1+ -g they should be tracked
12299 by VTA. */
12300 if (!(TYPE_NAME (type)
12301 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12302 && DECL_IGNORED_P (TYPE_NAME (type)))
12303 && TYPE_DOMAIN (type)
12304 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12306 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12307 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12308 DECL_IGNORED_P (t) = 0;
12309 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12310 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12311 DECL_IGNORED_P (t) = 0;
12313 break;
12315 case RECORD_TYPE:
12316 case UNION_TYPE:
12317 case QUAL_UNION_TYPE:
12318 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12319 if (TREE_CODE (field) == FIELD_DECL)
12321 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12322 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12323 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12324 gimplify_type_sizes (TREE_TYPE (field), list_p);
12326 break;
12328 case POINTER_TYPE:
12329 case REFERENCE_TYPE:
12330 /* We used to recurse on the pointed-to type here, which turned out to
12331 be incorrect because its definition might refer to variables not
12332 yet initialized at this point if a forward declaration is involved.
12334 It was actually useful for anonymous pointed-to types to ensure
12335 that the sizes evaluation dominates every possible later use of the
12336 values. Restricting to such types here would be safe since there
12337 is no possible forward declaration around, but would introduce an
12338 undesirable middle-end semantic to anonymity. We then defer to
12339 front-ends the responsibility of ensuring that the sizes are
12340 evaluated both early and late enough, e.g. by attaching artificial
12341 type declarations to the tree. */
12342 break;
12344 default:
12345 break;
12348 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12349 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12351 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12353 TYPE_SIZE (t) = TYPE_SIZE (type);
12354 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12355 TYPE_SIZES_GIMPLIFIED (t) = 1;
12359 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12360 a size or position, has had all of its SAVE_EXPRs evaluated.
12361 We add any required statements to *STMT_P. */
12363 void
12364 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12366 tree expr = *expr_p;
12368 /* We don't do anything if the value isn't there, is constant, or contains
12369 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12370 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12371 will want to replace it with a new variable, but that will cause problems
12372 if this type is from outside the function. It's OK to have that here. */
12373 if (is_gimple_sizepos (expr))
12374 return;
12376 *expr_p = unshare_expr (expr);
12378 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12379 if the def vanishes. */
12380 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12383 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12384 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12385 is true, also gimplify the parameters. */
12387 gbind *
12388 gimplify_body (tree fndecl, bool do_parms)
12390 location_t saved_location = input_location;
12391 gimple_seq parm_stmts, seq;
12392 gimple *outer_stmt;
12393 gbind *outer_bind;
12394 struct cgraph_node *cgn;
12396 timevar_push (TV_TREE_GIMPLIFY);
12398 init_tree_ssa (cfun);
12400 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12401 gimplification. */
12402 default_rtl_profile ();
12404 gcc_assert (gimplify_ctxp == NULL);
12405 push_gimplify_context (true);
12407 if (flag_openacc || flag_openmp)
12409 gcc_assert (gimplify_omp_ctxp == NULL);
12410 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12411 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12414 /* Unshare most shared trees in the body and in that of any nested functions.
12415 It would seem we don't have to do this for nested functions because
12416 they are supposed to be output and then the outer function gimplified
12417 first, but the g++ front end doesn't always do it that way. */
12418 unshare_body (fndecl);
12419 unvisit_body (fndecl);
12421 cgn = cgraph_node::get (fndecl);
12422 if (cgn && cgn->origin)
12423 nonlocal_vlas = new hash_set<tree>;
12425 /* Make sure input_location isn't set to something weird. */
12426 input_location = DECL_SOURCE_LOCATION (fndecl);
12428 /* Resolve callee-copies. This has to be done before processing
12429 the body so that DECL_VALUE_EXPR gets processed correctly. */
12430 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12432 /* Gimplify the function's body. */
12433 seq = NULL;
12434 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12435 outer_stmt = gimple_seq_first_stmt (seq);
12436 if (!outer_stmt)
12438 outer_stmt = gimple_build_nop ();
12439 gimplify_seq_add_stmt (&seq, outer_stmt);
12442 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12443 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12444 if (gimple_code (outer_stmt) == GIMPLE_BIND
12445 && gimple_seq_first (seq) == gimple_seq_last (seq))
12446 outer_bind = as_a <gbind *> (outer_stmt);
12447 else
12448 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12450 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12452 /* If we had callee-copies statements, insert them at the beginning
12453 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12454 if (!gimple_seq_empty_p (parm_stmts))
12456 tree parm;
12458 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12459 gimple_bind_set_body (outer_bind, parm_stmts);
12461 for (parm = DECL_ARGUMENTS (current_function_decl);
12462 parm; parm = DECL_CHAIN (parm))
12463 if (DECL_HAS_VALUE_EXPR_P (parm))
12465 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12466 DECL_IGNORED_P (parm) = 0;
12470 if (nonlocal_vlas)
12472 if (nonlocal_vla_vars)
12474 /* tree-nested.c may later on call declare_vars (..., true);
12475 which relies on BLOCK_VARS chain to be the tail of the
12476 gimple_bind_vars chain. Ensure we don't violate that
12477 assumption. */
12478 if (gimple_bind_block (outer_bind)
12479 == DECL_INITIAL (current_function_decl))
12480 declare_vars (nonlocal_vla_vars, outer_bind, true);
12481 else
12482 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12483 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12484 nonlocal_vla_vars);
12485 nonlocal_vla_vars = NULL_TREE;
12487 delete nonlocal_vlas;
12488 nonlocal_vlas = NULL;
12491 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12492 && gimplify_omp_ctxp)
12494 delete_omp_context (gimplify_omp_ctxp);
12495 gimplify_omp_ctxp = NULL;
12498 pop_gimplify_context (outer_bind);
12499 gcc_assert (gimplify_ctxp == NULL);
12501 if (flag_checking && !seen_error ())
12502 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12504 timevar_pop (TV_TREE_GIMPLIFY);
12505 input_location = saved_location;
12507 return outer_bind;
12510 typedef char *char_p; /* For DEF_VEC_P. */
12512 /* Return whether we should exclude FNDECL from instrumentation. */
12514 static bool
12515 flag_instrument_functions_exclude_p (tree fndecl)
12517 vec<char_p> *v;
12519 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12520 if (v && v->length () > 0)
12522 const char *name;
12523 int i;
12524 char *s;
12526 name = lang_hooks.decl_printable_name (fndecl, 0);
12527 FOR_EACH_VEC_ELT (*v, i, s)
12528 if (strstr (name, s) != NULL)
12529 return true;
12532 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12533 if (v && v->length () > 0)
12535 const char *name;
12536 int i;
12537 char *s;
12539 name = DECL_SOURCE_FILE (fndecl);
12540 FOR_EACH_VEC_ELT (*v, i, s)
12541 if (strstr (name, s) != NULL)
12542 return true;
12545 return false;
12548 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12549 node for the function we want to gimplify.
12551 Return the sequence of GIMPLE statements corresponding to the body
12552 of FNDECL. */
12554 void
12555 gimplify_function_tree (tree fndecl)
12557 tree parm, ret;
12558 gimple_seq seq;
12559 gbind *bind;
12561 gcc_assert (!gimple_body (fndecl));
12563 if (DECL_STRUCT_FUNCTION (fndecl))
12564 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12565 else
12566 push_struct_function (fndecl);
12568 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12569 if necessary. */
12570 cfun->curr_properties |= PROP_gimple_lva;
12572 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12574 /* Preliminarily mark non-addressed complex variables as eligible
12575 for promotion to gimple registers. We'll transform their uses
12576 as we find them. */
12577 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12578 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12579 && !TREE_THIS_VOLATILE (parm)
12580 && !needs_to_live_in_memory (parm))
12581 DECL_GIMPLE_REG_P (parm) = 1;
12584 ret = DECL_RESULT (fndecl);
12585 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12586 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12587 && !needs_to_live_in_memory (ret))
12588 DECL_GIMPLE_REG_P (ret) = 1;
12590 if (asan_sanitize_use_after_scope () && !asan_no_sanitize_address_p ())
12591 asan_poisoned_variables = new hash_set<tree> ();
12592 bind = gimplify_body (fndecl, true);
12593 if (asan_poisoned_variables)
12595 delete asan_poisoned_variables;
12596 asan_poisoned_variables = NULL;
12599 /* The tree body of the function is no longer needed, replace it
12600 with the new GIMPLE body. */
12601 seq = NULL;
12602 gimple_seq_add_stmt (&seq, bind);
12603 gimple_set_body (fndecl, seq);
12605 /* If we're instrumenting function entry/exit, then prepend the call to
12606 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12607 catch the exit hook. */
12608 /* ??? Add some way to ignore exceptions for this TFE. */
12609 if (flag_instrument_function_entry_exit
12610 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12611 /* Do not instrument extern inline functions. */
12612 && !(DECL_DECLARED_INLINE_P (fndecl)
12613 && DECL_EXTERNAL (fndecl)
12614 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12615 && !flag_instrument_functions_exclude_p (fndecl))
12617 tree x;
12618 gbind *new_bind;
12619 gimple *tf;
12620 gimple_seq cleanup = NULL, body = NULL;
12621 tree tmp_var;
12622 gcall *call;
12624 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12625 call = gimple_build_call (x, 1, integer_zero_node);
12626 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12627 gimple_call_set_lhs (call, tmp_var);
12628 gimplify_seq_add_stmt (&cleanup, call);
12629 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12630 call = gimple_build_call (x, 2,
12631 build_fold_addr_expr (current_function_decl),
12632 tmp_var);
12633 gimplify_seq_add_stmt (&cleanup, call);
12634 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12636 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12637 call = gimple_build_call (x, 1, integer_zero_node);
12638 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12639 gimple_call_set_lhs (call, tmp_var);
12640 gimplify_seq_add_stmt (&body, call);
12641 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12642 call = gimple_build_call (x, 2,
12643 build_fold_addr_expr (current_function_decl),
12644 tmp_var);
12645 gimplify_seq_add_stmt (&body, call);
12646 gimplify_seq_add_stmt (&body, tf);
12647 new_bind = gimple_build_bind (NULL, body, NULL);
12649 /* Replace the current function body with the body
12650 wrapped in the try/finally TF. */
12651 seq = NULL;
12652 gimple_seq_add_stmt (&seq, new_bind);
12653 gimple_set_body (fndecl, seq);
12654 bind = new_bind;
12657 if ((flag_sanitize & SANITIZE_THREAD) != 0
12658 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
12660 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12661 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12662 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12663 /* Replace the current function body with the body
12664 wrapped in the try/finally TF. */
12665 seq = NULL;
12666 gimple_seq_add_stmt (&seq, new_bind);
12667 gimple_set_body (fndecl, seq);
12670 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12671 cfun->curr_properties |= PROP_gimple_any;
12673 pop_cfun ();
12675 dump_function (TDI_generic, fndecl);
12678 /* Return a dummy expression of type TYPE in order to keep going after an
12679 error. */
12681 static tree
12682 dummy_object (tree type)
12684 tree t = build_int_cst (build_pointer_type (type), 0);
12685 return build2 (MEM_REF, type, t, t);
12688 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12689 builtin function, but a very special sort of operator. */
12691 enum gimplify_status
12692 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12693 gimple_seq *post_p ATTRIBUTE_UNUSED)
12695 tree promoted_type, have_va_type;
12696 tree valist = TREE_OPERAND (*expr_p, 0);
12697 tree type = TREE_TYPE (*expr_p);
12698 tree t, tag, aptag;
12699 location_t loc = EXPR_LOCATION (*expr_p);
12701 /* Verify that valist is of the proper type. */
12702 have_va_type = TREE_TYPE (valist);
12703 if (have_va_type == error_mark_node)
12704 return GS_ERROR;
12705 have_va_type = targetm.canonical_va_list_type (have_va_type);
12706 if (have_va_type == NULL_TREE
12707 && POINTER_TYPE_P (TREE_TYPE (valist)))
12708 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12709 have_va_type
12710 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
12711 gcc_assert (have_va_type != NULL_TREE);
12713 /* Generate a diagnostic for requesting data of a type that cannot
12714 be passed through `...' due to type promotion at the call site. */
12715 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12716 != type)
12718 static bool gave_help;
12719 bool warned;
12720 /* Use the expansion point to handle cases such as passing bool (defined
12721 in a system header) through `...'. */
12722 source_location xloc
12723 = expansion_point_location_if_in_system_header (loc);
12725 /* Unfortunately, this is merely undefined, rather than a constraint
12726 violation, so we cannot make this an error. If this call is never
12727 executed, the program is still strictly conforming. */
12728 warned = warning_at (xloc, 0,
12729 "%qT is promoted to %qT when passed through %<...%>",
12730 type, promoted_type);
12731 if (!gave_help && warned)
12733 gave_help = true;
12734 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
12735 promoted_type, type);
12738 /* We can, however, treat "undefined" any way we please.
12739 Call abort to encourage the user to fix the program. */
12740 if (warned)
12741 inform (xloc, "if this code is reached, the program will abort");
12742 /* Before the abort, allow the evaluation of the va_list
12743 expression to exit or longjmp. */
12744 gimplify_and_add (valist, pre_p);
12745 t = build_call_expr_loc (loc,
12746 builtin_decl_implicit (BUILT_IN_TRAP), 0);
12747 gimplify_and_add (t, pre_p);
12749 /* This is dead code, but go ahead and finish so that the
12750 mode of the result comes out right. */
12751 *expr_p = dummy_object (type);
12752 return GS_ALL_DONE;
12755 tag = build_int_cst (build_pointer_type (type), 0);
12756 aptag = build_int_cst (TREE_TYPE (valist), 0);
12758 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12759 valist, tag, aptag);
12761 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12762 needs to be expanded. */
12763 cfun->curr_properties &= ~PROP_gimple_lva;
12765 return GS_OK;
12768 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12770 DST/SRC are the destination and source respectively. You can pass
12771 ungimplified trees in DST or SRC, in which case they will be
12772 converted to a gimple operand if necessary.
12774 This function returns the newly created GIMPLE_ASSIGN tuple. */
12776 gimple *
12777 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12779 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12780 gimplify_and_add (t, seq_p);
12781 ggc_free (t);
12782 return gimple_seq_last_stmt (*seq_p);
12785 inline hashval_t
12786 gimplify_hasher::hash (const elt_t *p)
12788 tree t = p->val;
12789 return iterative_hash_expr (t, 0);
12792 inline bool
12793 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12795 tree t1 = p1->val;
12796 tree t2 = p2->val;
12797 enum tree_code code = TREE_CODE (t1);
12799 if (TREE_CODE (t2) != code
12800 || TREE_TYPE (t1) != TREE_TYPE (t2))
12801 return false;
12803 if (!operand_equal_p (t1, t2, 0))
12804 return false;
12806 /* Only allow them to compare equal if they also hash equal; otherwise
12807 results are nondeterminate, and we fail bootstrap comparison. */
12808 gcc_checking_assert (hash (p1) == hash (p2));
12810 return true;