PR tree-optimization/78496
[official-gcc.git] / gcc / gimplify.c
blobfd27eb1523f22f28f0c512d38a3ed54801438d0e
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-general.h"
55 #include "omp-low.h"
56 #include "gimple-low.h"
57 #include "cilk.h"
58 #include "gomp-constants.h"
59 #include "tree-dump.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
62 #include "builtins.h"
63 #include "asan.h"
64 #include "dbgcnt.h"
66 /* Hash set of poisoned variables in a bind expr. */
67 static hash_set<tree> *asan_poisoned_variables = NULL;
/* Per-variable flags recorded while gimplifying an OpenMP/OpenACC region.
   A variable's entry in gimplify_omp_ctx::variables holds an OR of these.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Mask selecting the flags that classify a variable's data-sharing.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OpenMP/OpenACC region being gimplified.  Values are bit-encoded
   so that combined constructs OR together their component bits (e.g.
   ORT_COMBINED_PARALLEL == ORT_PARALLEL | 1).  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
/* Gimplify hashtable helper: hash/equality traits for the formal-temporary
   table (elt_t maps a gimplified value to its temporary).  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
/* State for one level of gimplification; levels are stacked via
   PREV_CONTEXT and recycled through a pool (see ctx_alloc/ctx_free).  */

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;	/* Enclosing context, or NULL.  */

  vec<gbind *> bind_expr_stack;		/* Open GIMPLE_BINDs, innermost last.  */
  tree temps;				/* Chain of temporaries created here.  */
  gimple_seq conditional_cleanups;	/* Cleanups seen under a COND_EXPR.  */
  tree exit_label;			/* Label for the current loop exit.  */
  tree return_temp;			/* Temporary holding the return value.  */

  vec<tree> case_labels;		/* Labels of the current SWITCH_EXPR.  */
  hash_set<tree> *live_switch_vars;	/* Vars live across switch cases.  */
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;			/* Depth of nested COND_EXPRs.  */
  unsigned into_ssa : 1;		/* Create SSA names for temporaries.  */
  unsigned allow_rhs_cond_expr : 1;	/* COND_EXPR allowed as a gimple RHS.  */
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
/* State for one OpenMP/OpenACC region being gimplified; regions nest via
   OUTER_CONTEXT.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;	/* Enclosing region, or NULL.  */
  splay_tree variables;		/* tree -> gimplify_omp_var_data flags.  */
  hash_set<tree> *privatized_types;	/* Types whose copies were remapped.  */
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;		/* Source location of the directive.  */
  enum omp_clause_default_kind default_kind;	/* default(...) clause kind.  */
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};
190 static struct gimplify_ctx *gimplify_ctxp;
191 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
193 /* Forward declaration. */
194 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
195 static hash_map<tree, tree> *oacc_declare_returns;
196 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
197 bool (*) (tree), fallback_t, bool);
/* Shorter alias name for the above function for use in gimplify.c
   only.  Append statement GS to sequence *SEQ_P without updating
   operand caches (they don't exist yet during gimplification).  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
227 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
228 and popping gimplify contexts. */
230 static struct gimplify_ctx *ctx_pool = NULL;
/* Return a gimplify context struct from the pool, or a freshly malloc'd
   one if the pool is empty.  The returned struct is always zeroed.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    /* Pop the head of the free list.  */
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}
/* Put gimplify context C back into the pool (prev_context doubles as the
   free-list link).  C is not freed until free_gimplify_stack.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
/* Free allocated ctx stack memory (the whole recycling pool).  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}
/* Set up a context for the gimplifier.  IN_SSA selects whether new
   temporaries become SSA names; RHS_COND_OK whether a COND_EXPR may
   appear as a gimple RHS.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs opened in this context must have been closed.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();

  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Pre-reserve a small capacity to avoid repeated early growth.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
/* Return the first element of the stack of bindings (i.e. the innermost
   currently-open GIMPLE_BIND).  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entering the outermost conditional scope no conditional cleanups
     may be pending.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
/* A stable comparison routine for use with splay trees and DECLs.
   DECL_UIDs are non-negative ints, so the subtraction cannot overflow.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
/* Create a new omp construct that deals with variable remapping.
   The new context is chained to the current gimplify_omp_ctxp, but the
   caller is responsible for installing it.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  /* Tasks default to unspecified sharing; everything else to shared.  */
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}
/* Destroy an omp construct that deals with variable remapping.  Releases
   everything new_omp_context allocated.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
425 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
426 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the sequence ended before gimplifying T ...  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  /* ... so the first new tuple is the one right after that point.  */
  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
458 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
459 LHS, or for a call argument. */
461 static bool
462 is_gimple_mem_rhs (tree t)
464 /* If we're dealing with a renamable type, either source or dest must be
465 a renamed variable. */
466 if (is_gimple_reg_type (TREE_TYPE (t)))
467 return is_gimple_val (t);
468 else
469 return is_gimple_val (t) || is_gimple_lvalue (t);
472 /* Return true if T is a CALL_EXPR or an expression that can be
473 assigned to a temporary. Note that this predicate should only be
474 used during gimplification. See the rationale for this in
475 gimplify_modify_expr. */
477 static bool
478 is_gimple_reg_rhs_or_call (tree t)
480 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
481 || TREE_CODE (t) == CALL_EXPR);
484 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
485 this predicate should only be used during gimplification. See the
486 rationale for this in gimplify_modify_expr. */
488 static bool
489 is_gimple_mem_rhs_or_call (tree t)
491 /* If we're dealing with a renamable type, either source or dest must be
492 a renamed variable. */
493 if (is_gimple_reg_type (TREE_TYPE (t)))
494 return is_gimple_val (t);
495 else
496 return (is_gimple_val (t)
497 || is_gimple_lvalue (t)
498 || TREE_CLOBBER_P (t)
499 || TREE_CODE (t) == CALL_EXPR);
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  /* Complex and vector temporaries can live in pseudos.  */
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary via the formal temp table.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Look VAL up in the formal temp table, creating the table lazily.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First use of VAL: make a new temporary and remember it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary created for an earlier occurrence.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into PRE_P/POST_P, create a temporary (an SSA name when ALLOW_SSA
   and the context permits), emit the initializing assignment into PRE_P,
   and return the temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Outside SSA, give the name a recognizable identifier for
	     dumps/debugging.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  ALLOW_SSA permits creating an SSA name.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* VARS arrives newest-first; reverse it.  After nreverse, LAST
	 points at the tail of TEMPS.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no bound could be computed.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
/* Push the temporary variable TMP into the binding of function FN.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  /* TMP must not already be chained into any scope.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  /* TMP must not already be chained into any scope.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
757 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
758 nodes that are referenced more than once in GENERIC functions. This is
759 necessary because gimplification (translation into GIMPLE) is performed
760 by modifying tree nodes in-place, so gimplication of a shared node in a
761 first context could generate an invalid GIMPLE form in a second context.
763 This is achieved with a simple mark/copy/unmark algorithm that walks the
764 GENERIC representation top-down, marks nodes with TREE_VISITED the first
765 time it encounters them, duplicates them if they already have TREE_VISITED
766 set, and finally removes the TREE_VISITED marks it has set.
768 The algorithm works only at the function level, i.e. it generates a GENERIC
769 representation of a function with no nodes shared within the function when
770 passed a GENERIC function (except for nodes that are allowed to be shared).
772 At the global level, it is also necessary to unshare tree nodes that are
773 referenced in more than one function, for the same aforementioned reason.
774 This requires some cooperation from the front-end. There are 2 strategies:
776 1. Manual unsharing. The front-end needs to call unshare_expr on every
777 expression that might end up being shared across functions.
779 2. Deep unsharing. This is an extension of regular unsharing. Instead
780 of calling unshare_expr on expressions that might be shared across
781 functions, the front-end pre-marks them with TREE_VISITED. This will
782 ensure that they are unshared on the first reference within functions
783 when the regular unsharing algorithm runs. The counterpart is that
784 this algorithm must look deeper than for manual unsharing, which is
785 specified by LANG_HOOKS_DEEP_UNSHARING.
787 If there are only few specific cases of node sharing across functions, it is
788 probably easier for a front-end to unshare the expressions manually. On the
789 contrary, if the expressions generated at the global level are as widespread
790 as expressions generated within functions, deep unsharing is very likely the
791 way to go. */
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* hash_set::add returns false on first insertion, so subtrees are
	 walked only the first time each node is seen.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into nested functions recorded in the call graph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
/* Likewise, but mark all trees as not visited.  Counterpart of
   unshare_body for the unmark pass.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  /* Recurse into nested functions recorded in the call graph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
/* Worker for unshare_expr_without_location: clear the location of every
   EXPR node, stopping the walk at the first non-expression.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* Descend into the last statement of the list (NULL if
		   the list is empty).  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  On return *SAVE holds the
   __builtin_stack_save call and *RESTORE the matching restore call.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
/* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable.
   Returns NULL_TREE for zero-sized variables, which need no poisoning.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  /* When within an OMP context, do not emit ASAN_MARK internal fns.  */
  if (gimplify_omp_ctxp)
    return;

  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
1157 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1158 either poisons or unpoisons a DECL. Created statement is appended
1159 to SEQ_P gimple sequence. */
1161 static void
1162 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1164 gimple_stmt_iterator it = gsi_last (*seq_p);
1165 bool before = false;
1167 if (gsi_end_p (it))
1168 before = true;
1170 asan_poison_variable (decl, poison, &it, before);
1173 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1175 static int
1176 sort_by_decl_uid (const void *a, const void *b)
1178 const tree *t1 = (const tree *)a;
1179 const tree *t2 = (const tree *)b;
1181 int uid1 = DECL_UID (*t1);
1182 int uid2 = DECL_UID (*t2);
1184 if (uid1 < uid2)
1185 return -1;
1186 else if (uid1 > uid2)
1187 return 1;
1188 else
1189 return 0;
1192 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1193 depending on POISON flag. Created statement is appended
1194 to SEQ_P gimple sequence. */
1196 static void
1197 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1199 unsigned c = variables->elements ();
1200 if (c == 0)
1201 return;
1203 auto_vec<tree> sorted_variables (c);
1205 for (hash_set<tree>::iterator it = variables->begin ();
1206 it != variables->end (); ++it)
1207 sorted_variables.safe_push (*it);
1209 sorted_variables.qsort (sort_by_decl_uid);
1211 unsigned i;
1212 tree var;
1213 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1215 asan_poison_variable (var, poison, seq_p);
1217 /* Add use_after_scope_memory attribute for the variable in order
1218 to prevent re-written into SSA. */
1219 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1220 DECL_ATTRIBUTES (var)))
1221 DECL_ATTRIBUTES (var)
1222 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1223 integer_one_node,
1224 DECL_ATTRIBUTES (var));
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   Builds a GIMPLE_BIND for *EXPR_P and appends it to PRE_P.  Along the
   way: registers the bound variables with any enclosing OMP context,
   wraps the body in a GIMPLE_TRY_FINALLY when stack save/restore or
   cleanup actions (variable clobbers, asan unscope-poisoning, OpenACC
   declare-returns) are needed, and propagates keep_stack upward.
   Returns GS_OK when voidify_wrapper_expr produced a temporary (left in
   *EXPR_P), GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      /* Addressable locals in a SIMD region become private so
		 each lane gets its own copy.  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset per-bind stack flags; gimplifying the body may set them.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      /* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is the
		 GIMPLE representation of a clobber.  */
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  /* Collect OpenACC "declare returns" clauses for variables
	     leaving scope; they are emitted as one OACC_DECLARE below.  */
	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      /* Variables poisoned for use-after-scope get re-poisoned in the
	 cleanup sequence as they leave this scope.  */
      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1428 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1429 GIMPLE value, it is assigned to a new temporary and the statement is
1430 re-written to return the temporary.
1432 PRE_P points to the sequence where side effects that must happen before
1433 STMT should be stored. */
1435 static enum gimplify_status
1436 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1438 greturn *ret;
1439 tree ret_expr = TREE_OPERAND (stmt, 0);
1440 tree result_decl, result;
1442 if (ret_expr == error_mark_node)
1443 return GS_ERROR;
1445 /* Implicit _Cilk_sync must be inserted right before any return statement
1446 if there is a _Cilk_spawn in the function. If the user has provided a
1447 _Cilk_sync, the optimizer should remove this duplicate one. */
1448 if (fn_contains_cilk_spawn_p (cfun))
1450 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1451 gimplify_and_add (impl_sync, pre_p);
1454 if (!ret_expr
1455 || TREE_CODE (ret_expr) == RESULT_DECL
1456 || ret_expr == error_mark_node)
1458 greturn *ret = gimple_build_return (ret_expr);
1459 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1460 gimplify_seq_add_stmt (pre_p, ret);
1461 return GS_ALL_DONE;
1464 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1465 result_decl = NULL_TREE;
1466 else
1468 result_decl = TREE_OPERAND (ret_expr, 0);
1470 /* See through a return by reference. */
1471 if (TREE_CODE (result_decl) == INDIRECT_REF)
1472 result_decl = TREE_OPERAND (result_decl, 0);
1474 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1475 || TREE_CODE (ret_expr) == INIT_EXPR)
1476 && TREE_CODE (result_decl) == RESULT_DECL);
1479 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1480 Recall that aggregate_value_p is FALSE for any aggregate type that is
1481 returned in registers. If we're returning values in registers, then
1482 we don't want to extend the lifetime of the RESULT_DECL, particularly
1483 across another call. In addition, for those aggregates for which
1484 hard_function_value generates a PARALLEL, we'll die during normal
1485 expansion of structure assignments; there's special code in expand_return
1486 to handle this case that does not exist in expand_expr. */
1487 if (!result_decl)
1488 result = NULL_TREE;
1489 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1491 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1493 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1494 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1495 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1496 should be effectively allocated by the caller, i.e. all calls to
1497 this function must be subject to the Return Slot Optimization. */
1498 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1499 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1501 result = result_decl;
1503 else if (gimplify_ctxp->return_temp)
1504 result = gimplify_ctxp->return_temp;
1505 else
1507 result = create_tmp_reg (TREE_TYPE (result_decl));
1509 /* ??? With complex control flow (usually involving abnormal edges),
1510 we can wind up warning about an uninitialized value for this. Due
1511 to how this variable is constructed and initialized, this is never
1512 true. Give up and never warn. */
1513 TREE_NO_WARNING (result) = 1;
1515 gimplify_ctxp->return_temp = result;
1518 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1519 Then gimplify the whole thing. */
1520 if (result != result_decl)
1521 TREE_OPERAND (ret_expr, 0) = result;
1523 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1525 ret = gimple_build_return (result);
1526 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1527 gimplify_seq_add_stmt (pre_p, ret);
1529 return GS_ALL_DONE;
1532 /* Gimplify a variable-length array DECL. */
1534 static void
1535 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1537 /* This is a variable-sized decl. Simplify its size and mark it
1538 for deferred expansion. */
1539 tree t, addr, ptr_type;
1541 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1542 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1544 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1545 if (DECL_HAS_VALUE_EXPR_P (decl))
1546 return;
1548 /* All occurrences of this decl in final gimplified code will be
1549 replaced by indirection. Setting DECL_VALUE_EXPR does two
1550 things: First, it lets the rest of the gimplifier know what
1551 replacement to use. Second, it lets the debug info know
1552 where to find the value. */
1553 ptr_type = build_pointer_type (TREE_TYPE (decl));
1554 addr = create_tmp_var (ptr_type, get_name (decl));
1555 DECL_IGNORED_P (addr) = 0;
1556 t = build_fold_indirect_ref (addr);
1557 TREE_THIS_NOTRAP (t) = 1;
1558 SET_DECL_VALUE_EXPR (decl, t);
1559 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1561 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1562 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1563 size_int (DECL_ALIGN (decl)));
1564 /* The call has been built for a variable-sized object. */
1565 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1566 t = fold_convert (ptr_type, t);
1567 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1569 gimplify_and_add (t, seq_p);
1572 /* A helper function to be called via walk_tree. Mark all labels under *TP
1573 as being forced. To be called for DECL_INITIAL of static variables. */
1575 static tree
1576 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1578 if (TYPE_P (*tp))
1579 *walk_subtrees = 0;
1580 if (TREE_CODE (*tp) == LABEL_DECL)
1582 FORCED_LABEL (*tp) = 1;
1583 cfun->has_forced_label_in_static = 1;
1586 return NULL_TREE;
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.

   Gimplifies the declared entity's (and, for typedefs, its original
   type's) size expressions into SEQ_P, rewrites variable-sized
   variables via gimplify_vla_decl, emits use-after-scope unpoisoning
   for asan-tracked addressable locals, and turns a non-static
   initializer into an explicit INIT_EXPR appended to SEQ_P.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; nothing replaces it.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      /* For a reference, also gimplify the sizes of the referenced
	 type (e.g. a VLA bound behind a C++ reference).  */
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Treat as a VLA anything without a constant size, and also
	 over-large automatic variables under generic stack checking.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* Start tracking this variable for use-after-scope sanitization:
	 unpoison it at the declaration point; the enclosing bind's
	 cleanup re-poisons it on scope exit.  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && dbg_cnt (asan_use_after_scope))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit assignment; the
		 consumed INIT_EXPR node can be freed immediately.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1678 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1679 and replacing the LOOP_EXPR with goto, but if the loop contains an
1680 EXIT_EXPR, we need to append a label for it to jump to. */
1682 static enum gimplify_status
1683 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1685 tree saved_label = gimplify_ctxp->exit_label;
1686 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1688 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1690 gimplify_ctxp->exit_label = NULL_TREE;
1692 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1694 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1696 if (gimplify_ctxp->exit_label)
1697 gimplify_seq_add_stmt (pre_p,
1698 gimple_build_label (gimplify_ctxp->exit_label));
1700 gimplify_ctxp->exit_label = saved_label;
1702 *expr_p = NULL;
1703 return GS_ALL_DONE;
1706 /* Gimplify a statement list onto a sequence. These may be created either
1707 by an enlightened front-end, or by shortcut_cond_expr. */
1709 static enum gimplify_status
1710 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1712 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1714 tree_stmt_iterator i = tsi_start (*expr_p);
1716 while (!tsi_end_p (i))
1718 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1719 tsi_delink (&i);
1722 if (temp)
1724 *expr_p = temp;
1725 return GS_OK;
1728 return GS_ALL_DONE;
/* Callback for walk_gimple_seq.  Finds the first "real" statement in a
   switch body (skipping lexical scopes, EH wrappers and ASAN_MARK
   calls), stores it in WI->info, and stops the walk by returning
   integer_zero_node.  Used by maybe_warn_switch_unreachable.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      /* ASAN_MARK calls are instrumentation, not user code; keep
	 walking past them.  */
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
1774 /* Possibly warn about unreachable statements between switch's controlling
1775 expression and the first case. SEQ is the body of a switch expression. */
1777 static void
1778 maybe_warn_switch_unreachable (gimple_seq seq)
1780 if (!warn_switch_unreachable
1781 /* This warning doesn't play well with Fortran when optimizations
1782 are on. */
1783 || lang_GNU_Fortran ()
1784 || seq == NULL)
1785 return;
1787 struct walk_stmt_info wi;
1788 memset (&wi, 0, sizeof (wi));
1789 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1790 gimple *stmt = (gimple *) wi.info;
1792 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1794 if (gimple_code (stmt) == GIMPLE_GOTO
1795 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1796 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1797 /* Don't warn for compiler-generated gotos. These occur
1798 in Duff's devices, for example. */;
1799 else
1800 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1801 "statement will never be executed");
/* A label entry that pairs label and a location.  The location is the
   one a fall-through diagnostic should be reported at (e.g. the
   location of the GIMPLE_COND branching to the label — see
   collect_fallthrough_labels).  */
struct label_entry
{
  /* The LABEL_DECL.  */
  tree label;
  /* Location to use when diagnosing a fall through to LABEL.  */
  location_t loc;
};
1813 /* Find LABEL in vector of label entries VEC. */
1815 static struct label_entry *
1816 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1818 unsigned int i;
1819 struct label_entry *l;
1821 FOR_EACH_VEC_ELT (*vec, i, l)
1822 if (l->label == label)
1823 return l;
1824 return NULL;
1827 /* Return true if LABEL, a LABEL_DECL, represents a case label
1828 in a vector of labels CASES. */
1830 static bool
1831 case_label_p (const vec<tree> *cases, tree label)
1833 unsigned int i;
1834 tree l;
1836 FOR_EACH_VEC_ELT (*cases, i, l)
1837 if (CASE_LABEL (l) == label)
1838 return true;
1839 return false;
/* Find the last statement in a scope STMT, looking through nested
   GIMPLE_BINDs and GIMPLE_TRYs.  May return NULL when the innermost
   scope is empty.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	/* Recurse into the bind's body.  */
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* When the try body can fall through (NOTE(review):
	   gimple_stmt_may_fallthru is called before the NULL test, so
	   it presumably treats a NULL stmt as "may fall through" —
	   confirm against its definition in gimple.c) and doesn't end
	   in a FALLTHROUGH marker, the finally clause runs last, so
	   look there instead.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    default:
      return stmt;
    }
}
/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.

   Advances *GSI_P through the sequence, recording in LABELS the labels
   that control flow may fall through (the false edges of artificial
   GIMPLE_CONDs and destinations of location-less gotos), until it
   reaches a label with a source location (a case or user label).
   Returns the last "real" statement seen before that point, or NULL.  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The false label is a fall-through target; remember it with
	     the location of the controlling if.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      /* ASAN_MARK instrumentation calls never count as the preceding
	 statement.  */
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  return prev;
}
/* Return true if the switch fallthough warning should occur.  LABEL is
   the label statement that we're falling through to.  GSI_P points at
   the statement following the fall-through point; it is only peeked at
   (a local copy is advanced), never moved.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      /* Skip the run of non-case labels; if it isn't terminated by a
	 case label, this is the intentional pattern above — no
	 warning.  */
      tree l;
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
    gsi_next (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
/* Callback for walk_gimple_seq.  Detects a label sequence that a
   preceding statement may fall through into and emits the
   -Wimplicit-fallthrough diagnostic for it.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      /* The preceding statement is itself one of the recorded
		 fall-through labels: report at its recorded location.  */
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && gimple_has_location (prev))
	      warned_p = warning_at (gimple_location (prev),
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2132 /* Warn when a switch case falls through. */
2134 static void
2135 maybe_warn_implicit_fallthrough (gimple_seq seq)
2137 if (!warn_implicit_fallthrough)
2138 return;
2140 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2141 if (!(lang_GNU_C ()
2142 || lang_GNU_CXX ()
2143 || lang_GNU_OBJC ()))
2144 return;
2146 struct walk_stmt_info wi;
2147 memset (&wi, 0, sizeof (wi));
2148 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
/* Callback for walk_gimple_seq.  Removes each IFN_FALLTHROUGH internal
   call and checks that it is immediately followed (possibly through an
   artificial goto) by a case or default label; otherwise warns that the
   fallthrough attribute is misplaced.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* The marker itself is always removed.  */
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    return integer_zero_node;

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  /* Case/default labels are artificial but carry a
		     source location.  */
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else
		/* Something other than a label.  That's not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
			"a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2231 /* Expand all FALLTHROUGH () calls in SEQ. */
2233 static void
2234 expand_FALLTHROUGH (gimple_seq *seq_p)
2236 struct walk_stmt_info wi;
2237 memset (&wi, 0, sizeof (wi));
2238 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The controlling expression must be a GIMPLE value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      if (TREE_CODE (SWITCH_BODY (switch_expr)) == BIND_EXPR)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      /* Remember nesting so that fallthrough expansion below fires only
	 once, for the outermost switch.  */
      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* All switch-local variables must have been popped by now.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* A GIMPLE_SWITCH always needs a default case; synthesize an
	 empty one at the end of the body if the source had none.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
2332 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2334 static enum gimplify_status
2335 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2337 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2338 == current_function_decl);
2340 glabel *label_stmt = gimple_build_label (LABEL_EXPR_LABEL (*expr_p));
2341 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2342 gimplify_seq_add_stmt (pre_p, label_stmt);
2344 return GS_ALL_DONE;
2347 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2349 static enum gimplify_status
2350 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2352 struct gimplify_ctx *ctxp;
2353 glabel *label_stmt;
2355 /* Invalid programs can play Duff's Device type games with, for example,
2356 #pragma omp parallel. At least in the C front end, we don't
2357 detect such invalid branches until after gimplification, in the
2358 diagnose_omp_blocks pass. */
2359 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2360 if (ctxp->case_labels.exists ())
2361 break;
2363 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2364 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2365 ctxp->case_labels.safe_push (*expr_p);
2366 gimplify_seq_add_stmt (pre_p, label_stmt);
2368 return GS_ALL_DONE;
2371 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2372 if necessary. */
2374 tree
2375 build_and_jump (tree *label_p)
2377 if (label_p == NULL)
2378 /* If there's nowhere to jump, just fall through. */
2379 return NULL_TREE;
2381 if (*label_p == NULL_TREE)
2383 tree label = create_artificial_label (UNKNOWN_LOCATION);
2384 *label_p = label;
2387 return build1 (GOTO_EXPR, void_type_node, *label_p);
2390 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2391 This also involves building a label to jump to and communicating it to
2392 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2394 static enum gimplify_status
2395 gimplify_exit_expr (tree *expr_p)
2397 tree cond = TREE_OPERAND (*expr_p, 0);
2398 tree expr;
2400 expr = build_and_jump (&gimplify_ctxp->exit_label);
2401 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2402 *expr_p = expr;
2404 return GS_OK;
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral types get_unwidened picks the narrower mode a
     bit-field can be read in; otherwise the field's own type is the
     canonical one.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
/* Nonlocal VLAs seen in the current function.  Used to avoid creating
   more than one debug copy per VLA (see gimplify_var_or_parm_decl).  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (VAR_P (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip enclosing worksharing/simd/acc regions; only add the
	     debug decl when not inside any stronger OMP context.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns false on first insertion, so each VLA
	     gets at most one debug copy.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the list of debug-only VLA vars.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Recompute from scratch: volatile access implies side effects,
	 as does any operand that itself has side effects.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Capture the old value in a temporary; that temporary is the
	 value of the whole postfix expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart; negate the offset
	 instead.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2987 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2989 static void
2990 maybe_with_size_expr (tree *expr_p)
2992 tree expr = *expr_p;
2993 tree type = TREE_TYPE (expr);
2994 tree size;
2996 /* If we've already wrapped this or the type is error_mark_node, we can't do
2997 anything. */
2998 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2999 || type == error_mark_node)
3000 return;
3002 /* If the size isn't known or is a constant, we have nothing to do. */
3003 size = TYPE_SIZE_UNIT (type);
3004 if (!size || TREE_CODE (size) == INTEGER_CST)
3005 return;
3007 /* Otherwise, make a WITH_SIZE_EXPR. */
3008 size = unshare_expr (size);
3009 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3010 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
	      bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  /* Only strip when the initializer yields a value; a void
	     initializer means the TARGET_EXPR slot itself is the value.  */
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
3060 /* Don't fold inside offloading or taskreg regions: it can break code by
3061 adding decl references that weren't in the source. We'll do it during
3062 omplower pass instead. */
3064 static bool
3065 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3067 struct gimplify_omp_ctx *ctx;
3068 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3069 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3070 return false;
3071 return fold_stmt (gsi);
3074 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3075 with the pointer to the proper cilk frame. */
3076 static void
3077 gimplify_cilk_detach (gimple_seq *pre_p)
3079 tree frame = cfun->cilk_frame_decl;
3080 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3081 frame);
3082 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3083 ptrf);
3084 gimplify_seq_add_stmt(pre_p, detach);
3087 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3088 WANT_VALUE is true if the result of the call is desired. */
3090 static enum gimplify_status
3091 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3093 tree fndecl, parms, p, fnptrtype;
3094 enum gimplify_status ret;
3095 int i, nargs;
3096 gcall *call;
3097 bool builtin_va_start_p = false;
3098 location_t loc = EXPR_LOCATION (*expr_p);
3100 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3102 /* For reliable diagnostics during inlining, it is necessary that
3103 every call_expr be annotated with file and line. */
3104 if (! EXPR_HAS_LOCATION (*expr_p))
3105 SET_EXPR_LOCATION (*expr_p, input_location);
3107 /* Gimplify internal functions created in the FEs. */
3108 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3110 if (want_value)
3111 return GS_ALL_DONE;
3113 nargs = call_expr_nargs (*expr_p);
3114 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3115 auto_vec<tree> vargs (nargs);
3117 for (i = 0; i < nargs; i++)
3119 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3120 EXPR_LOCATION (*expr_p));
3121 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3124 if (EXPR_CILK_SPAWN (*expr_p))
3125 gimplify_cilk_detach (pre_p);
3126 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
3127 gimplify_seq_add_stmt (pre_p, call);
3128 return GS_ALL_DONE;
3131 /* This may be a call to a builtin function.
3133 Builtin function calls may be transformed into different
3134 (and more efficient) builtin function calls under certain
3135 circumstances. Unfortunately, gimplification can muck things
3136 up enough that the builtin expanders are not aware that certain
3137 transformations are still valid.
3139 So we attempt transformation/gimplification of the call before
3140 we gimplify the CALL_EXPR. At this time we do not manage to
3141 transform all calls in the same manner as the expanders do, but
3142 we do transform most of them. */
3143 fndecl = get_callee_fndecl (*expr_p);
3144 if (fndecl
3145 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3146 switch (DECL_FUNCTION_CODE (fndecl))
3148 case BUILT_IN_ALLOCA:
3149 case BUILT_IN_ALLOCA_WITH_ALIGN:
3150 /* If the call has been built for a variable-sized object, then we
3151 want to restore the stack level when the enclosing BIND_EXPR is
3152 exited to reclaim the allocated space; otherwise, we precisely
3153 need to do the opposite and preserve the latest stack level. */
3154 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3155 gimplify_ctxp->save_stack = true;
3156 else
3157 gimplify_ctxp->keep_stack = true;
3158 break;
3160 case BUILT_IN_VA_START:
3162 builtin_va_start_p = TRUE;
3163 if (call_expr_nargs (*expr_p) < 2)
3165 error ("too few arguments to function %<va_start%>");
3166 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3167 return GS_OK;
3170 if (fold_builtin_next_arg (*expr_p, true))
3172 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3173 return GS_OK;
3175 break;
3178 default:
3181 if (fndecl && DECL_BUILT_IN (fndecl))
3183 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3184 if (new_tree && new_tree != *expr_p)
3186 /* There was a transformation of this call which computes the
3187 same value, but in a more efficient way. Return and try
3188 again. */
3189 *expr_p = new_tree;
3190 return GS_OK;
3194 /* Remember the original function pointer type. */
3195 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3197 /* There is a sequence point before the call, so any side effects in
3198 the calling expression must occur before the actual call. Force
3199 gimplify_expr to use an internal post queue. */
3200 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3201 is_gimple_call_addr, fb_rvalue);
3203 nargs = call_expr_nargs (*expr_p);
3205 /* Get argument types for verification. */
3206 fndecl = get_callee_fndecl (*expr_p);
3207 parms = NULL_TREE;
3208 if (fndecl)
3209 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3210 else
3211 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3213 if (fndecl && DECL_ARGUMENTS (fndecl))
3214 p = DECL_ARGUMENTS (fndecl);
3215 else if (parms)
3216 p = parms;
3217 else
3218 p = NULL_TREE;
3219 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3222 /* If the last argument is __builtin_va_arg_pack () and it is not
3223 passed as a named argument, decrease the number of CALL_EXPR
3224 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3225 if (!p
3226 && i < nargs
3227 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3229 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3230 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3232 if (last_arg_fndecl
3233 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3234 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3235 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3237 tree call = *expr_p;
3239 --nargs;
3240 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3241 CALL_EXPR_FN (call),
3242 nargs, CALL_EXPR_ARGP (call));
3244 /* Copy all CALL_EXPR flags, location and block, except
3245 CALL_EXPR_VA_ARG_PACK flag. */
3246 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3247 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3248 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3249 = CALL_EXPR_RETURN_SLOT_OPT (call);
3250 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3251 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3253 /* Set CALL_EXPR_VA_ARG_PACK. */
3254 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3258 /* If the call returns twice then after building the CFG the call
3259 argument computations will no longer dominate the call because
3260 we add an abnormal incoming edge to the call. So do not use SSA
3261 vars there. */
3262 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3264 /* Gimplify the function arguments. */
3265 if (nargs > 0)
3267 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3268 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3269 PUSH_ARGS_REVERSED ? i-- : i++)
3271 enum gimplify_status t;
3273 /* Avoid gimplifying the second argument to va_start, which needs to
3274 be the plain PARM_DECL. */
3275 if ((i != 1) || !builtin_va_start_p)
3277 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3278 EXPR_LOCATION (*expr_p), ! returns_twice);
3280 if (t == GS_ERROR)
3281 ret = GS_ERROR;
3286 /* Gimplify the static chain. */
3287 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3289 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3290 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3291 else
3293 enum gimplify_status t;
3294 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3295 EXPR_LOCATION (*expr_p), ! returns_twice);
3296 if (t == GS_ERROR)
3297 ret = GS_ERROR;
3301 /* Verify the function result. */
3302 if (want_value && fndecl
3303 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3305 error_at (loc, "using result of function returning %<void%>");
3306 ret = GS_ERROR;
3309 /* Try this again in case gimplification exposed something. */
3310 if (ret != GS_ERROR)
3312 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3314 if (new_tree && new_tree != *expr_p)
3316 /* There was a transformation of this call which computes the
3317 same value, but in a more efficient way. Return and try
3318 again. */
3319 *expr_p = new_tree;
3320 return GS_OK;
3323 else
3325 *expr_p = error_mark_node;
3326 return GS_ERROR;
3329 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3330 decl. This allows us to eliminate redundant or useless
3331 calls to "const" functions. */
3332 if (TREE_CODE (*expr_p) == CALL_EXPR)
3334 int flags = call_expr_flags (*expr_p);
3335 if (flags & (ECF_CONST | ECF_PURE)
3336 /* An infinite loop is considered a side effect. */
3337 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3338 TREE_SIDE_EFFECTS (*expr_p) = 0;
3341 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3342 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3343 form and delegate the creation of a GIMPLE_CALL to
3344 gimplify_modify_expr. This is always possible because when
3345 WANT_VALUE is true, the caller wants the result of this call into
3346 a temporary, which means that we will emit an INIT_EXPR in
3347 internal_get_tmp_var which will then be handled by
3348 gimplify_modify_expr. */
3349 if (!want_value)
3351 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3352 have to do is replicate it as a GIMPLE_CALL tuple. */
3353 gimple_stmt_iterator gsi;
3354 call = gimple_build_call_from_tree (*expr_p);
3355 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
3356 notice_special_calls (call);
3357 if (EXPR_CILK_SPAWN (*expr_p))
3358 gimplify_cilk_detach (pre_p);
3359 gimplify_seq_add_stmt (pre_p, call);
3360 gsi = gsi_last (*pre_p);
3361 maybe_fold_stmt (&gsi);
3362 *expr_p = NULL_TREE;
3364 else
3365 /* Remember the original function type. */
3366 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3367 CALL_EXPR_FN (*expr_p));
3369 return ret;
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* A null FALSE_LABEL_P means the caller has no false target; create
	 a local label that we emit just past this expression.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      /* Symmetric to the && case: a local label catches the true edge.  */
      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a single COND_EXPR with
	 embedded jumps to the requested (or newly created) labels.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a local label above, emit it here so the short-circuit
     jump has somewhere to land.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through. */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the rewritten statement list: condition, then-arm,
     optional jump over the else, the else-arm, and the end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case EXPR of the form "call != 0" so we can look through
     __builtin_expect wrappers around a truth value.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Skip the conversion to the builtin's long argument type.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	  /* Boolify the annotated expression and give the annotation
	     itself boolean type as well.  */
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3746 /* Given a conditional expression *EXPR_P without side effects, gimplify
3747 its operands. New statements are inserted to PRE_P. */
3749 static enum gimplify_status
3750 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3752 tree expr = *expr_p, cond;
3753 enum gimplify_status ret, tret;
3754 enum tree_code code;
3756 cond = gimple_boolify (COND_EXPR_COND (expr));
3758 /* We need to handle && and || specially, as their gimplification
3759 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3760 code = TREE_CODE (cond);
3761 if (code == TRUTH_ANDIF_EXPR)
3762 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3763 else if (code == TRUTH_ORIF_EXPR)
3764 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3765 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3766 COND_EXPR_COND (*expr_p) = cond;
3768 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3769 is_gimple_val, fb_rvalue);
3770 ret = MIN (ret, tret);
3771 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3772 is_gimple_val, fb_rvalue);
3774 return MIN (ret, tret);
3777 /* Return true if evaluating EXPR could trap.
3778 EXPR is GENERIC, while tree_could_trap_p can be called
3779 only on GIMPLE. */
3781 static bool
3782 generic_expr_could_trap_p (tree expr)
3784 unsigned i, n;
3786 if (!expr || is_gimple_val (expr))
3787 return false;
3789 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3790 return true;
3792 n = TREE_OPERAND_LENGTH (expr);
3793 for (i = 0; i < n; i++)
3794 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3795 return true;
3797 return false;
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   FALLBACK indicates what kind of result (rvalue/lvalue) the caller can
   accept; it controls whether the value is captured in a temporary
   directly or through a pointer.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw. */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* When an arm is just "goto label" to a label in this function, jump
     straight to that label from the GIMPLE_COND instead of creating an
     artificial one.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
4043 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4044 to be marked addressable.
4046 We cannot rely on such an expression being directly markable if a temporary
4047 has been created by the gimplification. In this case, we create another
4048 temporary and initialize it with a copy, which will become a store after we
4049 mark it addressable. This can happen if the front-end passed us something
4050 that it could not mark addressable yet, like a Fortran pass-by-reference
4051 parameter (int) floatvar. */
4053 static void
4054 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4056 while (handled_component_p (*expr_p))
4057 expr_p = &TREE_OPERAND (*expr_p, 0);
4058 if (is_gimple_reg (*expr_p))
4060 /* Do not allow an SSA name as the temporary. */
4061 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4062 DECL_GIMPLE_REG_P (var) = 0;
4063 *expr_p = var;
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.

   *EXPR_P is the MODIFY_EXPR to replace, SIZE is the number of bytes to
   copy, WANT_VALUE says whether the caller uses the result of the
   assignment, and new statements go to SEQ_P.  Always returns
   GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memcpy returns the destination pointer; the value of the
	 assignment is what that pointer points to.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.

   *EXPR_P is the MODIFY_EXPR, SIZE the number of bytes to clear,
   WANT_VALUE says whether the caller uses the assignment's value, and
   new statements go to SEQ_P.  Always returns GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memset returns the destination pointer; the assignment's value
	 is the cleared object it points to.  */
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context passed through walk_tree to gimplify_init_ctor_preeval_1.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
/* walk_tree callback for gimplify_init_ctor_preeval: return *TP if it
   might overlap the lhs described by XDATA (a
   gimplify_init_ctor_preeval_data), NULL otherwise.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no subtrees worth walking.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   New statements are appended to PRE_P/POST_P.  On gimplification
   failure *EXPR_P is set to NULL.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

/* OBJECT is the array being initialized; LOWER/UPPER are the range
   bounds, VALUE the initializer for each element, ARRAY_ELT_TYPE the
   element type used for the ARRAY_REF.  Generated statements are
   appended to PRE_P; CLEARED is propagated to nested constructors.  */

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4363 /* Return true if FDECL is accessing a field that is zero sized. */
4365 static bool
4366 zero_sized_field_decl (const_tree fdecl)
4368 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4369 && integer_zerop (DECL_SIZE (fdecl)))
4370 return true;
4371 return false;
4374 /* Return true if TYPE is zero sized. */
4376 static bool
4377 zero_sized_type (const_tree type)
4379 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4380 && integer_zerop (TYPE_SIZE (type)))
4381 return true;
4382 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Zero stores are redundant if the whole object was cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested aggregate constructors; vectors keep their
	 CONSTRUCTOR form through gimplification and are assigned whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4474 /* Return the appropriate RHS predicate for this LHS. */
4476 gimple_predicate
4477 rhs_predicate_for (tree lhs)
4479 if (is_gimple_reg (lhs))
4480 return is_gimple_reg_rhs_or_call;
4481 else
4482 return is_gimple_mem_rhs_or_call;
4485 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4486 before the LHS has been gimplified. */
4488 static gimple_predicate
4489 initial_rhs_predicate_for (tree lhs)
4491 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4492 return is_gimple_reg_rhs_or_call;
4493 else
4494 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F is the predicate the caller wants the result to satisfy;
   FALLBACK says whether an lvalue is required.  Returns GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  The original is
   left untouched (copy-on-write); callers may compare the result
   pointer against the argument to detect a change.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only replace the literal by its initializer when neither
	     the literal nor its decl has had its address taken.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: make a private copy of the node and its element
	 vector so the original constructor is not modified.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: the actual folding lives in gimple_fold_indirect_ref;
     kept as a separate entry point for the RHS-only restriction above.  */
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  Returns GS_UNHANDLED when no simplification
   applied; the WITH_SIZE_EXPR case deliberately leaves RET alone to
   avoid an infinite GS_OK loop (see comment there).  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV.  */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
/* Return true if T looks like a valid GIMPLE statement.  */

static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  /* This is a syntactic whitelist: anything not listed below cannot
     appear as a toplevel GIMPLE statement.  */
  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case CILK_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
5348 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5349 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5350 DECL_GIMPLE_REG_P set.
5352 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5353 other, unmodified part of the complex object just before the total store.
5354 As a consequence, if the object is still uninitialized, an undefined value
5355 will be loaded into a register, which may result in a spurious exception
5356 if the register is floating-point and the value happens to be a signaling
5357 NaN for example. Then the fully-fledged complex operations lowering pass
5358 followed by a DCE pass are necessary in order to fix things up. */
5360 static enum gimplify_status
5361 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5362 bool want_value)
5364 enum tree_code code, ocode;
5365 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5367 lhs = TREE_OPERAND (*expr_p, 0);
5368 rhs = TREE_OPERAND (*expr_p, 1);
5369 code = TREE_CODE (lhs);
5370 lhs = TREE_OPERAND (lhs, 0);
5372 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5373 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5374 TREE_NO_WARNING (other) = 1;
5375 other = get_formal_tmp_var (other, pre_p);
5377 realpart = code == REALPART_EXPR ? rhs : other;
5378 imagpart = code == REALPART_EXPR ? other : rhs;
5380 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5381 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5382 else
5383 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5385 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5386 *expr_p = (want_value) ? rhs : NULL_TREE;
5388 return GS_ALL_DONE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended as an
	     extra trailing argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl)
    {
      /* Give the artificial temporary a debug-friendly name derived from
	 the user variable it feeds, so the debugger can display it.  */
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* Don't re-read a volatile LHS to produce the expression value; use the
     RHS via a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    /* __builtin_expect is lowered to its internal-function form.  */
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      if (EXPR_CILK_SPAWN (*from_p))
	gimplify_cilk_detach (pre_p);
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile LHS, *from_p was already detached into a temporary
	 above, so this never re-reads the volatile object.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5676 /* Gimplify a comparison between two variable-sized objects. Do this
5677 with a call to BUILT_IN_MEMCMP. */
5679 static enum gimplify_status
5680 gimplify_variable_sized_compare (tree *expr_p)
5682 location_t loc = EXPR_LOCATION (*expr_p);
5683 tree op0 = TREE_OPERAND (*expr_p, 0);
5684 tree op1 = TREE_OPERAND (*expr_p, 1);
5685 tree t, arg, dest, src, expr;
5687 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5688 arg = unshare_expr (arg);
5689 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5690 src = build_fold_addr_expr_loc (loc, op1);
5691 dest = build_fold_addr_expr_loc (loc, op0);
5692 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5693 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5695 expr
5696 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5697 SET_EXPR_LOCATION (expr, loc);
5698 *expr_p = expr;
5700 return GS_OK;
5703 /* Gimplify a comparison between two aggregate objects of integral scalar
5704 mode as a comparison between the bitwise equivalent scalar values. */
5706 static enum gimplify_status
5707 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5709 location_t loc = EXPR_LOCATION (*expr_p);
5710 tree op0 = TREE_OPERAND (*expr_p, 0);
5711 tree op1 = TREE_OPERAND (*expr_p, 1);
5713 tree type = TREE_TYPE (op0);
5714 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5716 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5717 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5719 *expr_p
5720 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5722 return GS_OK;
5725 /* Gimplify an expression sequence. This function gimplifies each
5726 expression and rewrites the original expression with the last
5727 expression of the sequence in GIMPLE form.
5729 PRE_P points to the list where the side effects for all the
5730 expressions in the sequence will be emitted.
5732 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5734 static enum gimplify_status
5735 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5737 tree t = *expr_p;
5741 tree *sub_p = &TREE_OPERAND (t, 0);
5743 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5744 gimplify_compound_expr (sub_p, pre_p, false);
5745 else
5746 gimplify_stmt (sub_p, pre_p);
5748 t = TREE_OPERAND (t, 1);
5750 while (TREE_CODE (t) == COMPOUND_EXPR);
5752 *expr_p = t;
5753 if (want_value)
5754 return GS_OK;
5755 else
5757 gimplify_stmt (expr_p, pre_p);
5758 return GS_ALL_DONE;
5762 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5763 gimplify. After gimplification, EXPR_P will point to a new temporary
5764 that holds the original value of the SAVE_EXPR node.
5766 PRE_P points to the list where side effects that must happen before
5767 *EXPR_P should be stored. */
5769 static enum gimplify_status
5770 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5772 enum gimplify_status ret = GS_ALL_DONE;
5773 tree val;
5775 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5776 val = TREE_OPERAND (*expr_p, 0);
5778 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5779 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5781 /* The operand may be a void-valued expression such as SAVE_EXPRs
5782 generated by the Java frontend for class initialization. It is
5783 being executed only for its side-effects. */
5784 if (TREE_TYPE (val) == void_type_node)
5786 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5787 is_gimple_stmt, fb_none);
5788 val = NULL;
5790 else
5791 /* The temporary may not be an SSA name as later abnormal and EH
5792 control flow may invalidate use/def domination. */
5793 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5795 TREE_OPERAND (*expr_p, 0) = val;
5796 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5799 *expr_p = val;
5801 return ret;
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* A MEM_REF with a zero offset is equivalent to an INDIRECT_REF
	 for the purposes of '&*ptr' folding; share that code path.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* Pass 1: gimplify and collect the output operands, recording each
     output constraint in OCONSTRAINTS for matching-constraint checks in
     the input pass below.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
         it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      /* Route the operand through a fresh register temporary:
		 copy in before the asm for in/out operands, copy the
		 result back out after the asm.  */
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: rewrite each alternative, substituting
		     the operand number for register alternatives.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Prepend '=' so parse_output_constraint accepts
			 the isolated alternative.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  /* Pass 2: gimplify and collect the input operands (including any
     matching inputs appended by the in/out splitting above).  */
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Modifying expressions cannot serve as memory inputs; poison
	     them so gimplification below reports the error.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  /* Clobbers and labels need no gimplification; just detach each TREE_LIST
     node and push it.  */
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm without outputs is implicitly volatile: it is kept for
	 its side effects alone.  */
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Scan the gimplified body for GIMPLE_WITH_CLEANUP_EXPR markers and
     rewrite each into a GIMPLE_TRY covering the statements that follow
     it in the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The cleanup is the last statement: nothing can follow it,
		 so just inline the cleanup (unless it is EH-only, in
		 which case it can never run here) and drop the marker.  */
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try body, so nested
		 cleanup markers get rewritten too.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      /* voidify_wrapper_expr created a temporary holding the wrapper's
	 value; it becomes the result of the expression.  */
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
6320 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6321 is the cleanup action required. EH_ONLY is true if the cleanup should
6322 only be executed if an exception is thrown, not on normal exit.
6323 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6324 only valid for clobbers. */
6326 static void
6327 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6328 bool force_uncond = false)
6330 gimple *wce;
6331 gimple_seq cleanup_stmts = NULL;
6333 /* Errors can result in improperly nested cleanups. Which results in
6334 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6335 if (seen_error ())
6336 return;
6338 if (gimple_conditional_context ())
6340 /* If we're in a conditional context, this is more complex. We only
6341 want to run the cleanup if we actually ran the initialization that
6342 necessitates it, but we want to run it after the end of the
6343 conditional context. So we wrap the try/finally around the
6344 condition and use a flag to determine whether or not to actually
6345 run the destructor. Thus
6347 test ? f(A()) : 0
6349 becomes (approximately)
6351 flag = 0;
6352 try {
6353 if (test) { A::A(temp); flag = 1; val = f(temp); }
6354 else { val = 0; }
6355 } finally {
6356 if (flag) A::~A(temp);
/* Per the function comment, FORCE_UNCOND is only valid for clobbers,
   which are safe to execute even when the guarded initialization did
   not run, so no flag variable is needed for them.  */
6360 if (force_uncond)
6362 gimplify_stmt (&cleanup, &cleanup_stmts);
6363 wce = gimple_build_wce (cleanup_stmts);
6364 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6366 else
6368 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6369 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6370 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
/* Guard the cleanup with the flag; FFALSE and the guarded WCE go on the
   conditional-cleanup list, while FTRUE is emitted at the current point
   so the flag is set only once the initialization has actually run.  */
6372 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6373 gimplify_stmt (&cleanup, &cleanup_stmts);
6374 wce = gimple_build_wce (cleanup_stmts);
6376 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6377 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6378 gimplify_seq_add_stmt (pre_p, ftrue);
6380 /* Because of this manipulation, and the EH edges that jump
6381 threading cannot redirect, the temporary (VAR) will appear
6382 to be used uninitialized. Don't warn. */
6383 TREE_NO_WARNING (var) = 1;
6386 else
6388 gimplify_stmt (&cleanup, &cleanup_stmts);
6389 wce = gimple_build_wce (cleanup_stmts);
6390 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6391 gimplify_seq_add_stmt (pre_p, wce);
6395 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6397 static enum gimplify_status
6398 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6400 tree targ = *expr_p;
6401 tree temp = TARGET_EXPR_SLOT (targ);
6402 tree init = TARGET_EXPR_INITIAL (targ);
6403 enum gimplify_status ret;
/* Iterator marking where an ASan unpoison call must be placed if the
   temporary later turns out to need use-after-scope instrumentation.  */
6405 bool unpoison_empty_seq = false;
6406 gimple_stmt_iterator unpoison_it;
6408 if (init)
6410 tree cleanup = NULL_TREE;
6412 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6413 to the temps list. Handle also variable length TARGET_EXPRs. */
6414 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6416 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6417 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6418 gimplify_vla_decl (temp, pre_p);
6420 else
6422 /* Save location where we need to place unpoisoning. It's possible
6423 that a variable will be converted to needs_to_live_in_memory. */
6424 unpoison_it = gsi_last (*pre_p);
6425 unpoison_empty_seq = gsi_end_p (unpoison_it);
6427 gimple_add_tmp_var (temp);
6430 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6431 expression is supposed to initialize the slot. */
6432 if (VOID_TYPE_P (TREE_TYPE (init)))
6433 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6434 else
6436 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6437 init = init_expr;
6438 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6439 init = NULL;
6440 ggc_free (init_expr);
6442 if (ret == GS_ERROR)
6444 /* PR c++/28266 Make sure this is expanded only once. */
6445 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6446 return GS_ERROR;
6448 if (init)
6449 gimplify_and_add (init, pre_p);
6451 /* If needed, push the cleanup for the temp. */
6452 if (TARGET_EXPR_CLEANUP (targ))
6454 if (CLEANUP_EH_ONLY (targ))
6455 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6456 CLEANUP_EH_ONLY (targ), pre_p);
6457 else
6458 cleanup = TARGET_EXPR_CLEANUP (targ);
6461 /* Add a clobber for the temporary going out of scope, like
6462 gimplify_bind_expr. */
6463 if (gimplify_ctxp->in_cleanup_point_expr
6464 && needs_to_live_in_memory (temp))
6466 if (flag_stack_reuse == SR_ALL)
6468 tree clobber = build_constructor (TREE_TYPE (temp),
6469 NULL);
6470 TREE_THIS_VOLATILE (clobber) = true;
6471 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
/* Clobbers may run unconditionally, hence force_uncond == true.  */
6472 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6474 if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope))
6476 tree asan_cleanup = build_asan_poison_call_expr (temp);
6477 if (asan_cleanup)
6479 if (unpoison_empty_seq)
6480 unpoison_it = gsi_start (*pre_p);
/* Unpoison at the point the temp came into scope; the poison call is
   pushed as a cleanup so it runs when the temp goes out of scope.  */
6482 asan_poison_variable (temp, false, &unpoison_it,
6483 unpoison_empty_seq);
6484 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6488 if (cleanup)
6489 gimple_push_cleanup (temp, cleanup, false, pre_p);
6491 /* Only expand this once. */
6492 TREE_OPERAND (targ, 3) = init;
6493 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6495 else
6496 /* We should have expanded this before. */
6497 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6499 *expr_p = temp;
6500 return GS_OK;
6503 /* Gimplification of expression trees. */
6505 /* Gimplify an expression which appears at statement context. The
6506 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6507 NULL, a new sequence is allocated.
6509 Return true if we actually added a statement to the queue. */
6511 bool
6512 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6514 gimple_seq_node last;
6516 last = gimple_seq_last (*seq_p);
6517 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6518 return last != gimple_seq_last (*seq_p);
6521 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6522 to CTX. If entries already exist, force them to be some flavor of private.
6523 If there is no enclosing parallel, do nothing. */
6525 void
6526 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6528 splay_tree_node n;
6530 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6531 return;
/* Walk outward through the enclosing contexts; this is the body of a
   do/while loop whose tail is the "while (ctx);" below.  */
6535 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6536 if (n != NULL)
/* Already known in this context: demote SHARED to FIRSTPRIVATE, make a
   MAP to-only, and stop the walk for any other existing class.  */
6538 if (n->value & GOVD_SHARED)
6539 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN)
6540 else if (n->value & GOVD_MAP)
6541 n->value |= GOVD_MAP_TO_ONLY;
6542 else
6543 return;
6545 else if ((ctx->region_type & ORT_TARGET) != 0)
6547 if (ctx->target_map_scalars_firstprivate)
6548 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6549 else
6550 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
/* Worksharing, simd, ACC and target-data regions do not record the
   variable here; keep looking at outer contexts.  */
6552 else if (ctx->region_type != ORT_WORKSHARE
6553 && ctx->region_type != ORT_SIMD
6554 && ctx->region_type != ORT_ACC
6555 && !(ctx->region_type & ORT_TARGET_DATA))
6556 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6558 ctx = ctx->outer_context;
6560 while (ctx);
6563 /* Similarly for each of the type sizes of TYPE. */
6565 static void
6566 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6568 if (type == NULL || type == error_mark_node)
6569 return;
6570 type = TYPE_MAIN_VARIANT (type);
6572 if (ctx->privatized_types->add (type))
6573 return;
6575 switch (TREE_CODE (type))
6577 case INTEGER_TYPE:
6578 case ENUMERAL_TYPE:
6579 case BOOLEAN_TYPE:
6580 case REAL_TYPE:
6581 case FIXED_POINT_TYPE:
6582 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6583 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6584 break;
6586 case ARRAY_TYPE:
6587 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6588 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6589 break;
6591 case RECORD_TYPE:
6592 case UNION_TYPE:
6593 case QUAL_UNION_TYPE:
6595 tree field;
6596 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6597 if (TREE_CODE (field) == FIELD_DECL)
6599 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6600 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6603 break;
6605 case POINTER_TYPE:
6606 case REFERENCE_TYPE:
6607 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6608 break;
6610 default:
6611 break;
6614 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6615 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6616 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6619 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6621 static void
6622 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6624 splay_tree_node n;
6625 unsigned int nflags;
6626 tree t;
6628 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6629 return;
6631 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6632 there are constructors involved somewhere. */
6633 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
6634 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
6635 flags |= GOVD_SEEN;
6637 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6638 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6640 /* We shouldn't be re-adding the decl with the same data
6641 sharing class. */
6642 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6643 nflags = n->value | flags;
6644 /* The only combination of data sharing classes we should see is
6645 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6646 reduction variables to be used in data sharing clauses. */
6647 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6648 || ((nflags & GOVD_DATA_SHARE_CLASS)
6649 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6650 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6651 n->value = nflags;
6652 return;
6655 /* When adding a variable-sized variable, we have to handle all sorts
6656 of additional bits of data: the pointer replacement variable, and
6657 the parameters of the type. */
6658 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6660 /* Add the pointer replacement variable as PRIVATE if the variable
6661 replacement is private, else FIRSTPRIVATE since we'll need the
6662 address of the original variable either for SHARED, or for the
6663 copy into or out of the context. */
6664 if (!(flags & GOVD_LOCAL))
6666 if (flags & GOVD_MAP)
6667 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6668 else if (flags & GOVD_PRIVATE)
6669 nflags = GOVD_PRIVATE;
6670 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6671 && (flags & GOVD_FIRSTPRIVATE))
6672 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6673 else
6674 nflags = GOVD_FIRSTPRIVATE;
6675 nflags |= flags & GOVD_SEEN;
/* A variable-sized decl has a DECL_VALUE_EXPR of the form *ptr; add
   that pointer replacement variable recursively.  */
6676 t = DECL_VALUE_EXPR (decl);
6677 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6678 t = TREE_OPERAND (t, 0);
6679 gcc_assert (DECL_P (t));
6680 omp_add_variable (ctx, t, nflags);
6683 /* Add all of the variable and type parameters (which should have
6684 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6685 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6686 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6687 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6689 /* The variable-sized variable itself is never SHARED, only some form
6690 of PRIVATE. The sharing would take place via the pointer variable
6691 which we remapped above. */
6692 if (flags & GOVD_SHARED)
6693 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
6694 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6696 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6697 alloca statement we generate for the variable, so make sure it
6698 is available. This isn't automatically needed for the SHARED
6699 case, since we won't be allocating local storage then.
6700 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6701 in this case omp_notice_variable will be called later
6702 on when it is gimplified. */
6703 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6704 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6705 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6707 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6708 && lang_hooks.decls.omp_privatize_by_reference (decl))
6710 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6712 /* Similar to the direct variable sized case above, we'll need the
6713 size of references being privatized. */
6714 if ((flags & GOVD_SHARED) == 0)
6716 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6717 if (DECL_P (t))
6718 omp_notice_variable (ctx, t, true);
/* Finally record DECL itself, merging with any pre-existing entry that
   had no data-sharing class yet.  */
6722 if (n != NULL)
6723 n->value |= flags;
6724 else
6725 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6727 /* For reductions clauses in OpenACC loop directives, by default create a
6728 copy clause on the enclosing parallel construct for carrying back the
6729 results. */
6730 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6732 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6733 while (outer_ctx)
6735 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6736 if (n != NULL)
6738 /* Ignore local variables and explicitly declared clauses. */
6739 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6740 break;
6741 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6743 /* According to the OpenACC spec, such a reduction variable
6744 should already have a copy map on a kernels construct,
6745 verify that here. */
6746 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6747 && (n->value & GOVD_MAP));
6749 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6751 /* Remove firstprivate and make it a copy map. */
6752 n->value &= ~GOVD_FIRSTPRIVATE;
6753 n->value |= GOVD_MAP;
6756 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6758 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6759 GOVD_MAP | GOVD_SEEN);
6760 break;
6762 outer_ctx = outer_ctx->outer_context;
6767 /* Notice a threadprivate variable DECL used in OMP context CTX.
6768 This just prints out diagnostics about threadprivate variable uses
6769 in untied tasks. If DECL2 is non-NULL, prevent this warning
6770 on that variable. */
6772 static bool
6773 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6774 tree decl2)
6776 splay_tree_node n;
6777 struct gimplify_omp_ctx *octx;
6779 for (octx = ctx; octx; octx = octx->outer_context)
6780 if ((octx->region_type & ORT_TARGET) != 0)
6782 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6783 if (n == NULL)
6785 error ("threadprivate variable %qE used in target region",
6786 DECL_NAME (decl));
6787 error_at (octx->location, "enclosing target region");
6788 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6790 if (decl2)
6791 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6794 if (ctx->region_type != ORT_UNTIED_TASK)
6795 return false;
6796 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6797 if (n == NULL)
6799 error ("threadprivate variable %qE used in untied task",
6800 DECL_NAME (decl));
6801 error_at (ctx->location, "enclosing task");
6802 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6804 if (decl2)
6805 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6806 return false;
6809 /* Return true if global var DECL is device resident. */
6811 static bool
6812 device_resident_p (tree decl)
6814 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6816 if (!attr)
6817 return false;
6819 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6821 tree c = TREE_VALUE (t);
6822 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6823 return true;
6826 return false;
6829 /* Return true if DECL has an ACC DECLARE attribute. */
6831 static bool
6832 is_oacc_declared (tree decl)
6834 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6835 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6836 return declared != NULL_TREE;
6839 /* Determine outer default flags for DECL mentioned in an OMP region
6840 but not declared in an enclosing clause.
6842 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6843 remapped firstprivate instead of shared. To some extent this is
6844 addressed in omp_firstprivatize_type_sizes, but not
6845 effectively. */
6847 static unsigned
6848 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6849 bool in_code, unsigned flags)
6851 enum omp_clause_default_kind default_kind = ctx->default_kind;
6852 enum omp_clause_default_kind kind;
/* A language-predetermined sharing kind overrides the region's default
   clause.  */
6854 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6855 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6856 default_kind = kind;
6858 switch (default_kind)
6860 case OMP_CLAUSE_DEFAULT_NONE:
6862 const char *rtype;
6864 if (ctx->region_type & ORT_PARALLEL)
6865 rtype = "parallel";
6866 else if (ctx->region_type & ORT_TASK)
6867 rtype = "task";
6868 else if (ctx->region_type & ORT_TEAMS)
6869 rtype = "teams";
6870 else
6871 gcc_unreachable ();
6873 error ("%qE not specified in enclosing %qs",
6874 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6875 error_at (ctx->location, "enclosing %qs", rtype);
6877 /* FALLTHRU */
6878 case OMP_CLAUSE_DEFAULT_SHARED:
6879 flags |= GOVD_SHARED;
6880 break;
6881 case OMP_CLAUSE_DEFAULT_PRIVATE:
6882 flags |= GOVD_PRIVATE;
6883 break;
6884 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6885 flags |= GOVD_FIRSTPRIVATE;
6886 break;
6887 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6888 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6889 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6890 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6892 omp_notice_variable (octx, decl, in_code);
6893 for (; octx; octx = octx->outer_context)
6895 splay_tree_node n2;
6897 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
/* Skip target regions where the variable has no data-sharing class.  */
6898 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6899 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6900 continue;
6901 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6903 flags |= GOVD_FIRSTPRIVATE;
6904 goto found_outer;
6906 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6908 flags |= GOVD_SHARED;
6909 goto found_outer;
/* No outer context determined the sharing: parameters and other
   function-local variables become firstprivate, globals shared.  */
6914 if (TREE_CODE (decl) == PARM_DECL
6915 || (!is_global_var (decl)
6916 && DECL_CONTEXT (decl) == current_function_decl))
6917 flags |= GOVD_FIRSTPRIVATE;
6918 else
6919 flags |= GOVD_SHARED;
6920 found_outer:
6921 break;
6923 default:
6924 gcc_unreachable ();
6927 return flags;
6931 /* Determine outer default flags for DECL mentioned in an OACC region
6932 but not declared in an enclosing clause. */
6934 static unsigned
6935 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6937 const char *rkind;
6938 bool on_device = false;
6939 bool declared = is_oacc_declared (decl);
6940 tree type = TREE_TYPE (decl);
6942 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6943 type = TREE_TYPE (type);
/* Globals already resident on the device only need a to-only mapping.  */
6945 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6946 && is_global_var (decl)
6947 && device_resident_p (decl))
6949 on_device = true;
6950 flags |= GOVD_MAP_TO_ONLY;
6953 switch (ctx->region_type)
6955 default:
6956 gcc_unreachable ();
6958 case ORT_ACC_KERNELS:
6959 /* Scalars are default 'copy' under kernels, non-scalars are default
6960 'present_or_copy'. */
6961 flags |= GOVD_MAP;
6962 if (!AGGREGATE_TYPE_P (type))
6963 flags |= GOVD_MAP_FORCE;
6965 rkind = "kernels";
6966 break;
6968 case ORT_ACC_PARALLEL:
6970 if (on_device || AGGREGATE_TYPE_P (type) || declared)
6971 /* Aggregates default to 'present_or_copy'. */
6972 flags |= GOVD_MAP;
6973 else
6974 /* Scalars default to 'firstprivate'. */
6975 flags |= GOVD_FIRSTPRIVATE;
6976 rkind = "parallel";
6978 break;
/* Diagnose only under default(none); compiler-generated decls are
   exempt from the diagnostic.  */
6981 if (DECL_ARTIFICIAL (decl))
6982 ; /* We can get compiler-generated decls, and should not complain
6983 about them. */
6984 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6986 error ("%qE not specified in enclosing OpenACC %qs construct",
6987 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6988 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6990 else
6991 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6993 return flags;
6996 /* Record the fact that DECL was used within the OMP context CTX.
6997 IN_CODE is true when real code uses DECL, and false when we should
6998 merely emit default(none) errors. Return true if DECL is going to
6999 be remapped and thus DECL shouldn't be gimplified into its
7000 DECL_VALUE_EXPR (if any). */
7002 static bool
7003 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7005 splay_tree_node n;
7006 unsigned flags = in_code ? GOVD_SEEN : 0;
7007 bool ret = false, shared;
7009 if (error_operand_p (decl))
7010 return false;
7012 if (ctx->region_type == ORT_NONE)
7013 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
/* Globals need special treatment: threadprivate variables and OpenACC
   'routine' restrictions are diagnosed here.  */
7015 if (is_global_var (decl))
7017 /* Threadprivate variables are predetermined. */
7018 if (DECL_THREAD_LOCAL_P (decl))
7019 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7021 if (DECL_HAS_VALUE_EXPR_P (decl))
7023 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7025 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7026 return omp_notice_threadprivate_variable (ctx, decl, value);
7029 if (gimplify_omp_ctxp->outer_context == NULL
7030 && VAR_P (decl)
7031 && oacc_get_fn_attrib (current_function_decl))
7033 location_t loc = DECL_SOURCE_LOCATION (decl);
7035 if (lookup_attribute ("omp declare target link",
7036 DECL_ATTRIBUTES (decl)))
7038 error_at (loc,
7039 "%qE with %<link%> clause used in %<routine%> function",
7040 DECL_NAME (decl));
7041 return false;
7043 else if (!lookup_attribute ("omp declare target",
7044 DECL_ATTRIBUTES (decl)))
7046 error_at (loc,
7047 "%qE requires a %<declare%> directive for use "
7048 "in a %<routine%> function", DECL_NAME (decl));
7049 return false;
7054 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
/* Inside a target region an unseen variable must be given a default
   mapping or firstprivatization before being recorded.  */
7055 if ((ctx->region_type & ORT_TARGET) != 0)
7057 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7058 if (n == NULL)
7060 unsigned nflags = flags;
7061 if (ctx->target_map_pointers_as_0len_arrays
7062 || ctx->target_map_scalars_firstprivate)
7064 bool is_declare_target = false;
7065 bool is_scalar = false;
7066 if (is_global_var (decl)
7067 && varpool_node::get_create (decl)->offloadable)
7069 struct gimplify_omp_ctx *octx;
7070 for (octx = ctx->outer_context;
7071 octx; octx = octx->outer_context)
7073 n = splay_tree_lookup (octx->variables,
7074 (splay_tree_key)decl);
7075 if (n
7076 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7077 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7078 break;
7080 is_declare_target = octx == NULL;
7082 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7083 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7084 if (is_declare_target)
7086 else if (ctx->target_map_pointers_as_0len_arrays
7087 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7088 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7089 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7090 == POINTER_TYPE)))
7091 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7092 else if (is_scalar)
7093 nflags |= GOVD_FIRSTPRIVATE;
7096 struct gimplify_omp_ctx *octx = ctx->outer_context;
7097 if ((ctx->region_type & ORT_ACC) && octx)
7099 /* Look in outer OpenACC contexts, to see if there's a
7100 data attribute for this variable. */
7101 omp_notice_variable (octx, decl, in_code);
7103 for (; octx; octx = octx->outer_context)
7105 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7106 break;
7107 splay_tree_node n2
7108 = splay_tree_lookup (octx->variables,
7109 (splay_tree_key) decl);
7110 if (n2)
7112 if (octx->region_type == ORT_ACC_HOST_DATA)
7113 error ("variable %qE declared in enclosing "
7114 "%<host_data%> region", DECL_NAME (decl));
7115 nflags |= GOVD_MAP;
7116 if (octx->region_type == ORT_ACC_DATA
7117 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7118 nflags |= GOVD_MAP_0LEN_ARRAY;
7119 goto found_outer;
7125 tree type = TREE_TYPE (decl);
7127 if (nflags == flags
7128 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7129 && lang_hooks.decls.omp_privatize_by_reference (decl))
7130 type = TREE_TYPE (type);
7131 if (nflags == flags
7132 && !lang_hooks.types.omp_mappable_type (type))
7134 error ("%qD referenced in target region does not have "
7135 "a mappable type", decl);
7136 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7138 else if (nflags == flags)
7140 if ((ctx->region_type & ORT_ACC) != 0)
7141 nflags = oacc_default_clause (ctx, decl, flags);
7142 else
7143 nflags |= GOVD_MAP;
7146 found_outer:
7147 omp_add_variable (ctx, decl, nflags);
7149 else
7151 /* If nothing changed, there's nothing left to do. */
7152 if ((n->value & flags) == flags)
7153 return ret;
7154 flags |= n->value;
7155 n->value = flags;
7157 goto do_outer;
/* Not a target region.  If the variable is unknown here, compute
   default flags, or defer to an outer context for worksharing-like
   regions which do not themselves assign a data-sharing class.  */
7160 if (n == NULL)
7162 if (ctx->region_type == ORT_WORKSHARE
7163 || ctx->region_type == ORT_SIMD
7164 || ctx->region_type == ORT_ACC
7165 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7166 goto do_outer;
7168 flags = omp_default_clause (ctx, decl, in_code, flags);
7170 if ((flags & GOVD_PRIVATE)
7171 && lang_hooks.decls.omp_private_outer_ref (decl))
7172 flags |= GOVD_PRIVATE_OUTER_REF;
7174 omp_add_variable (ctx, decl, flags);
7176 shared = (flags & GOVD_SHARED) != 0;
7177 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7178 goto do_outer;
/* First real use of a variable already on the table: propagate
   GOVD_SEEN to the pointer-replacement or size decls of variable-sized
   and by-reference privatized variables.  */
7181 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7182 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7183 && DECL_SIZE (decl))
7185 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7187 splay_tree_node n2;
7188 tree t = DECL_VALUE_EXPR (decl);
7189 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7190 t = TREE_OPERAND (t, 0);
7191 gcc_assert (DECL_P (t));
7192 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7193 n2->value |= GOVD_SEEN;
7195 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7196 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7197 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7198 != INTEGER_CST))
7200 splay_tree_node n2;
7201 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7202 gcc_assert (DECL_P (t));
7203 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7204 if (n2)
7205 omp_notice_variable (ctx, t, true);
7209 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7210 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7212 /* If nothing changed, there's nothing left to do. */
7213 if ((n->value & flags) == flags)
7214 return ret;
7215 flags |= n->value;
7216 n->value = flags;
7218 do_outer:
7219 /* If the variable is private in the current context, then we don't
7220 need to propagate anything to an outer context. */
7221 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7222 return ret;
7223 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7224 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7225 return ret;
7226 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7227 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7228 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7229 return ret;
7230 if (ctx->outer_context
7231 && omp_notice_variable (ctx->outer_context, decl, in_code))
7232 return true;
7233 return ret;
7236 /* Verify that DECL is private within CTX. If there's specific information
7237 to the contrary in the innermost scope, generate an error. */
7239 static bool
7240 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7242 splay_tree_node n;
7244 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7245 if (n != NULL)
7247 if (n->value & GOVD_SHARED)
7249 if (ctx == gimplify_omp_ctxp)
7251 if (simd)
7252 error ("iteration variable %qE is predetermined linear",
7253 DECL_NAME (decl));
7254 else
7255 error ("iteration variable %qE should be private",
7256 DECL_NAME (decl));
/* Force the variable private so the error is reported only once.  */
7257 n->value = GOVD_PRIVATE;
7258 return true;
7260 else
7261 return false;
7263 else if ((n->value & GOVD_EXPLICIT) != 0
7264 && (ctx == gimplify_omp_ctxp
7265 || (ctx->region_type == ORT_COMBINED_PARALLEL
7266 && gimplify_omp_ctxp->outer_context == ctx)))
/* NOTE(review): SIMD appears to encode the loop kind (0 = not simd,
   1 = simd, 2 = another simd variant) and selects which explicit
   clauses are rejected below — confirm against the callers.  */
7268 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7269 error ("iteration variable %qE should not be firstprivate",
7270 DECL_NAME (decl));
7271 else if ((n->value & GOVD_REDUCTION) != 0)
7272 error ("iteration variable %qE should not be reduction",
7273 DECL_NAME (decl));
7274 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7275 error ("iteration variable %qE should not be linear",
7276 DECL_NAME (decl));
7277 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7278 error ("iteration variable %qE should not be lastprivate",
7279 DECL_NAME (decl));
7280 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7281 error ("iteration variable %qE should not be private",
7282 DECL_NAME (decl));
7283 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7284 error ("iteration variable %qE is predetermined linear",
7285 DECL_NAME (decl));
7287 return (ctx == gimplify_omp_ctxp
7288 || (ctx->region_type == ORT_COMBINED_PARALLEL
7289 && gimplify_omp_ctxp->outer_context == ctx));
/* Unknown in this context: only worksharing, simd and acc regions look
   outward for the answer.  */
7292 if (ctx->region_type != ORT_WORKSHARE
7293 && ctx->region_type != ORT_SIMD
7294 && ctx->region_type != ORT_ACC)
7295 return false;
7296 else if (ctx->outer_context)
7297 return omp_is_private (ctx->outer_context, decl, simd);
7298 return false;
7301 /* Return true if DECL is private within a parallel region
7302 that binds to the current construct's context or in parallel
7303 region's REDUCTION clause. */
7305 static bool
7306 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7308 splay_tree_node n;
/* Walk outward through the enclosing contexts; this is the body of a
   do/while loop whose tail is the "while (...)" below.  */
7312 ctx = ctx->outer_context;
7313 if (ctx == NULL)
7315 if (is_global_var (decl))
7316 return false;
7318 /* References might be private, but might be shared too,
7319 when checking for copyprivate, assume they might be
7320 private, otherwise assume they might be shared. */
7321 if (copyprivate)
7322 return true;
7324 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7325 return false;
7327 /* Treat C++ privatized non-static data members outside
7328 of the privatization the same. */
7329 if (omp_member_access_dummy_var (decl))
7330 return false;
7332 return true;
7335 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
/* Skip target regions where the variable has no data-sharing class.  */
7337 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7338 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7339 continue;
7341 if (n != NULL)
7343 if ((n->value & GOVD_LOCAL) != 0
7344 && omp_member_access_dummy_var (decl))
7345 return false;
7346 return (n->value & GOVD_SHARED) == 0;
7349 while (ctx->region_type == ORT_WORKSHARE
7350 || ctx->region_type == ORT_SIMD
7351 || ctx->region_type == ORT_ACC);
7352 return false;
7355 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7357 static tree
7358 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7360 tree t = *tp;
7362 /* If this node has been visited, unmark it and keep looking. */
7363 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7364 return t;
7366 if (IS_TYPE_OR_DECL_P (t))
7367 *walk_subtrees = 0;
7368 return NULL_TREE;
7371 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7372 omp context as well as into the enclosing omp contexts. */
7374 static void
7375 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7376 enum omp_region_type region_type,
7377 enum tree_code code)
7379 struct gimplify_omp_ctx *ctx, *outer_ctx;
7380 tree c;
7381 hash_map<tree, tree> *struct_map_to_clause = NULL;
/* prev_list_p remembers the chain slot of the clause preceding a
   GOMP_MAP_ALWAYS_POINTER map, so that pointer map can later be
   re-chained next to its GOMP_MAP_STRUCT entry.  */
7382 tree *prev_list_p = NULL;
7384 ctx = new_omp_context (region_type);
7385 outer_ctx = ctx->outer_context;
/* For non-Fortran target constructs, pointers are mapped as
   zero-length arrays and scalars default to firstprivate.  */
7386 if (code == OMP_TARGET)
7388 if (!lang_GNU_Fortran ())
7389 ctx->target_map_pointers_as_0len_arrays = true;
7390 ctx->target_map_scalars_firstprivate = true;
7392 if (!lang_GNU_Fortran ())
7393 switch (code)
7395 case OMP_TARGET:
7396 case OMP_TARGET_DATA:
7397 case OMP_TARGET_ENTER_DATA:
7398 case OMP_TARGET_EXIT_DATA:
7399 case OACC_DECLARE:
7400 case OACC_HOST_DATA:
7401 ctx->target_firstprivatize_array_bases = true;
7402 default:
7403 break;
/* Walk the clause chain; clauses flagged REMOVE are unlinked at the
   bottom of this loop.  */
7406 while ((c = *list_p) != NULL)
7408 bool remove = false;
7409 bool notice_outer = true;
7410 const char *check_non_private = NULL;
7411 unsigned int flags;
7412 tree decl;
7414 switch (OMP_CLAUSE_CODE (c))
7416 case OMP_CLAUSE_PRIVATE:
7417 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7418 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7420 flags |= GOVD_PRIVATE_OUTER_REF;
7421 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7423 else
7424 notice_outer = false;
7425 goto do_add;
7426 case OMP_CLAUSE_SHARED:
7427 flags = GOVD_SHARED | GOVD_EXPLICIT;
7428 goto do_add;
7429 case OMP_CLAUSE_FIRSTPRIVATE:
7430 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7431 check_non_private = "firstprivate";
7432 goto do_add;
7433 case OMP_CLAUSE_LASTPRIVATE:
7434 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7435 check_non_private = "lastprivate";
7436 decl = OMP_CLAUSE_DECL (c);
7437 if (error_operand_p (decl))
7438 goto do_add;
/* On combined constructs, propagate the variable to the enclosing
   parallel/teams/task contexts as appropriate.  */
7439 else if (outer_ctx
7440 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7441 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7442 && splay_tree_lookup (outer_ctx->variables,
7443 (splay_tree_key) decl) == NULL)
7445 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7446 if (outer_ctx->outer_context)
7447 omp_notice_variable (outer_ctx->outer_context, decl, true);
7449 else if (outer_ctx
7450 && (outer_ctx->region_type & ORT_TASK) != 0
7451 && outer_ctx->combined_loop
7452 && splay_tree_lookup (outer_ctx->variables,
7453 (splay_tree_key) decl) == NULL)
7455 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7456 if (outer_ctx->outer_context)
7457 omp_notice_variable (outer_ctx->outer_context, decl, true);
7459 else if (outer_ctx
7460 && (outer_ctx->region_type == ORT_WORKSHARE
7461 || outer_ctx->region_type == ORT_ACC)
7462 && outer_ctx->combined_loop
7463 && splay_tree_lookup (outer_ctx->variables,
7464 (splay_tree_key) decl) == NULL
7465 && !omp_check_private (outer_ctx, decl, false))
7467 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7468 if (outer_ctx->outer_context
7469 && (outer_ctx->outer_context->region_type
7470 == ORT_COMBINED_PARALLEL)
7471 && splay_tree_lookup (outer_ctx->outer_context->variables,
7472 (splay_tree_key) decl) == NULL)
7474 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7475 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7476 if (octx->outer_context)
7478 octx = octx->outer_context;
7479 if (octx->region_type == ORT_WORKSHARE
7480 && octx->combined_loop
7481 && splay_tree_lookup (octx->variables,
7482 (splay_tree_key) decl) == NULL
7483 && !omp_check_private (octx, decl, false))
7485 omp_add_variable (octx, decl,
7486 GOVD_LASTPRIVATE | GOVD_SEEN);
7487 octx = octx->outer_context;
7488 if (octx
7489 && octx->region_type == ORT_COMBINED_TEAMS
7490 && (splay_tree_lookup (octx->variables,
7491 (splay_tree_key) decl)
7492 == NULL))
7494 omp_add_variable (octx, decl,
7495 GOVD_SHARED | GOVD_SEEN);
7496 octx = octx->outer_context;
7499 if (octx)
7500 omp_notice_variable (octx, decl, true);
7503 else if (outer_ctx->outer_context)
7504 omp_notice_variable (outer_ctx->outer_context, decl, true);
7506 goto do_add;
7507 case OMP_CLAUSE_REDUCTION:
7508 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7509 /* OpenACC permits reductions on private variables. */
7510 if (!(region_type & ORT_ACC))
7511 check_non_private = "reduction";
7512 decl = OMP_CLAUSE_DECL (c);
/* Array-section reductions arrive as a MEM_REF; gimplify the array
   bound and any pointer offset embedded in it, then reduce DECL to
   the underlying base.  */
7513 if (TREE_CODE (decl) == MEM_REF)
7515 tree type = TREE_TYPE (decl);
7516 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7517 NULL, is_gimple_val, fb_rvalue, false)
7518 == GS_ERROR)
7520 remove = true;
7521 break;
7523 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7524 if (DECL_P (v))
7526 omp_firstprivatize_variable (ctx, v);
7527 omp_notice_variable (ctx, v, true);
7529 decl = TREE_OPERAND (decl, 0);
7530 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7532 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7533 NULL, is_gimple_val, fb_rvalue, false)
7534 == GS_ERROR)
7536 remove = true;
7537 break;
7539 v = TREE_OPERAND (decl, 1);
7540 if (DECL_P (v))
7542 omp_firstprivatize_variable (ctx, v);
7543 omp_notice_variable (ctx, v, true);
7545 decl = TREE_OPERAND (decl, 0);
7547 if (TREE_CODE (decl) == ADDR_EXPR
7548 || TREE_CODE (decl) == INDIRECT_REF)
7549 decl = TREE_OPERAND (decl, 0);
7551 goto do_add_decl;
7552 case OMP_CLAUSE_LINEAR:
7553 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7554 is_gimple_val, fb_rvalue) == GS_ERROR)
7556 remove = true;
7557 break;
7559 else
7561 if (code == OMP_SIMD
7562 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7564 struct gimplify_omp_ctx *octx = outer_ctx;
7565 if (octx
7566 && octx->region_type == ORT_WORKSHARE
7567 && octx->combined_loop
7568 && !octx->distribute)
7570 if (octx->outer_context
7571 && (octx->outer_context->region_type
7572 == ORT_COMBINED_PARALLEL))
7573 octx = octx->outer_context->outer_context;
7574 else
7575 octx = octx->outer_context;
7577 if (octx
7578 && octx->region_type == ORT_WORKSHARE
7579 && octx->combined_loop
7580 && octx->distribute)
7582 error_at (OMP_CLAUSE_LOCATION (c),
7583 "%<linear%> clause for variable other than "
7584 "loop iterator specified on construct "
7585 "combined with %<distribute%>");
7586 remove = true;
7587 break;
7590 /* For combined #pragma omp parallel for simd, need to put
7591 lastprivate and perhaps firstprivate too on the
7592 parallel. Similarly for #pragma omp for simd. */
7593 struct gimplify_omp_ctx *octx = outer_ctx;
7594 decl = NULL_TREE;
7597 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7598 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7599 break;
7600 decl = OMP_CLAUSE_DECL (c);
7601 if (error_operand_p (decl))
7603 decl = NULL_TREE;
7604 break;
7606 flags = GOVD_SEEN;
7607 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7608 flags |= GOVD_FIRSTPRIVATE;
7609 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7610 flags |= GOVD_LASTPRIVATE;
7611 if (octx
7612 && octx->region_type == ORT_WORKSHARE
7613 && octx->combined_loop)
7615 if (octx->outer_context
7616 && (octx->outer_context->region_type
7617 == ORT_COMBINED_PARALLEL))
7618 octx = octx->outer_context;
7619 else if (omp_check_private (octx, decl, false))
7620 break;
7622 else if (octx
7623 && (octx->region_type & ORT_TASK) != 0
7624 && octx->combined_loop)
7626 else if (octx
7627 && octx->region_type == ORT_COMBINED_PARALLEL
7628 && ctx->region_type == ORT_WORKSHARE
7629 && octx == outer_ctx)
7630 flags = GOVD_SEEN | GOVD_SHARED;
7631 else if (octx
7632 && octx->region_type == ORT_COMBINED_TEAMS)
7633 flags = GOVD_SEEN | GOVD_SHARED;
7634 else if (octx
7635 && octx->region_type == ORT_COMBINED_TARGET)
7637 flags &= ~GOVD_LASTPRIVATE;
7638 if (flags == GOVD_SEEN)
7639 break;
7641 else
7642 break;
7643 splay_tree_node on
7644 = splay_tree_lookup (octx->variables,
7645 (splay_tree_key) decl);
7646 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7648 octx = NULL;
7649 break;
7651 omp_add_variable (octx, decl, flags);
7652 if (octx->outer_context == NULL)
7653 break;
7654 octx = octx->outer_context;
7656 while (1);
7657 if (octx
7658 && decl
7659 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7660 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7661 omp_notice_variable (octx, decl, true);
7663 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7664 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7665 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7667 notice_outer = false;
7668 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7670 goto do_add;
7672 case OMP_CLAUSE_MAP:
7673 decl = OMP_CLAUSE_DECL (c);
7674 if (error_operand_p (decl))
7675 remove = true;
7676 switch (code)
7678 case OMP_TARGET:
7679 break;
7680 case OACC_DATA:
7681 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7682 break;
7683 /* FALLTHRU */
7684 case OMP_TARGET_DATA:
7685 case OMP_TARGET_ENTER_DATA:
7686 case OMP_TARGET_EXIT_DATA:
7687 case OACC_ENTER_DATA:
7688 case OACC_EXIT_DATA:
7689 case OACC_HOST_DATA:
7690 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7691 || (OMP_CLAUSE_MAP_KIND (c)
7692 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7693 /* For target {,enter ,exit }data only the array slice is
7694 mapped, but not the pointer to it. */
7695 remove = true;
7696 break;
7697 default:
7698 break;
7700 if (remove)
7701 break;
7702 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7704 struct gimplify_omp_ctx *octx;
7705 for (octx = outer_ctx; octx; octx = octx->outer_context)
7707 if (octx->region_type != ORT_ACC_HOST_DATA)
7708 break;
7709 splay_tree_node n2
7710 = splay_tree_lookup (octx->variables,
7711 (splay_tree_key) decl);
7712 if (n2)
7713 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7714 "declared in enclosing %<host_data%> region",
7715 DECL_NAME (decl));
7718 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7719 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7720 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7721 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7722 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7724 remove = true;
7725 break;
7727 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7728 || (OMP_CLAUSE_MAP_KIND (c)
7729 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7730 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7732 OMP_CLAUSE_SIZE (c)
7733 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7734 false);
7735 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7736 GOVD_FIRSTPRIVATE | GOVD_SEEN);
/* Non-decl map operands (component refs, array sections): find the
   base decl and, for struct components, group sibling maps under a
   single GOMP_MAP_STRUCT clause, kept sorted by offset.  */
7738 if (!DECL_P (decl))
7740 tree d = decl, *pd;
7741 if (TREE_CODE (d) == ARRAY_REF)
7743 while (TREE_CODE (d) == ARRAY_REF)
7744 d = TREE_OPERAND (d, 0);
7745 if (TREE_CODE (d) == COMPONENT_REF
7746 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7747 decl = d;
7749 pd = &OMP_CLAUSE_DECL (c);
7750 if (d == decl
7751 && TREE_CODE (decl) == INDIRECT_REF
7752 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7753 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7754 == REFERENCE_TYPE))
7756 pd = &TREE_OPERAND (decl, 0);
7757 decl = TREE_OPERAND (decl, 0);
7759 if (TREE_CODE (decl) == COMPONENT_REF)
7761 while (TREE_CODE (decl) == COMPONENT_REF)
7762 decl = TREE_OPERAND (decl, 0);
7763 if (TREE_CODE (decl) == INDIRECT_REF
7764 && DECL_P (TREE_OPERAND (decl, 0))
7765 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7766 == REFERENCE_TYPE))
7767 decl = TREE_OPERAND (decl, 0);
7769 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7770 == GS_ERROR)
7772 remove = true;
7773 break;
7775 if (DECL_P (decl))
7777 if (error_operand_p (decl))
7779 remove = true;
7780 break;
7783 tree stype = TREE_TYPE (decl);
7784 if (TREE_CODE (stype) == REFERENCE_TYPE)
7785 stype = TREE_TYPE (stype);
7786 if (TYPE_SIZE_UNIT (stype) == NULL
7787 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7789 error_at (OMP_CLAUSE_LOCATION (c),
7790 "mapping field %qE of variable length "
7791 "structure", OMP_CLAUSE_DECL (c));
7792 remove = true;
7793 break;
7796 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7798 /* Error recovery. */
7799 if (prev_list_p == NULL)
7801 remove = true;
7802 break;
7804 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7806 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7807 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7809 remove = true;
7810 break;
/* Compute the byte offset of this component within the base decl,
   for sorting it into the GOMP_MAP_STRUCT's sibling list.  */
7815 tree offset;
7816 HOST_WIDE_INT bitsize, bitpos;
7817 machine_mode mode;
7818 int unsignedp, reversep, volatilep = 0;
7819 tree base = OMP_CLAUSE_DECL (c);
7820 while (TREE_CODE (base) == ARRAY_REF)
7821 base = TREE_OPERAND (base, 0);
7822 if (TREE_CODE (base) == INDIRECT_REF)
7823 base = TREE_OPERAND (base, 0);
7824 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7825 &mode, &unsignedp, &reversep,
7826 &volatilep);
7827 tree orig_base = base;
7828 if ((TREE_CODE (base) == INDIRECT_REF
7829 || (TREE_CODE (base) == MEM_REF
7830 && integer_zerop (TREE_OPERAND (base, 1))))
7831 && DECL_P (TREE_OPERAND (base, 0))
7832 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7833 == REFERENCE_TYPE))
7834 base = TREE_OPERAND (base, 0);
7835 gcc_assert (base == decl
7836 && (offset == NULL_TREE
7837 || TREE_CODE (offset) == INTEGER_CST));
7839 splay_tree_node n
7840 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7841 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7842 == GOMP_MAP_ALWAYS_POINTER);
/* First component mapped for this base: create the GOMP_MAP_STRUCT
   clause and remember it in struct_map_to_clause.  */
7843 if (n == NULL || (n->value & GOVD_MAP) == 0)
7845 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7846 OMP_CLAUSE_MAP);
7847 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7848 if (orig_base != base)
7849 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7850 else
7851 OMP_CLAUSE_DECL (l) = decl;
7852 OMP_CLAUSE_SIZE (l) = size_int (1);
7853 if (struct_map_to_clause == NULL)
7854 struct_map_to_clause = new hash_map<tree, tree>;
7855 struct_map_to_clause->put (decl, l);
7856 if (ptr)
7858 enum gomp_map_kind mkind
7859 = code == OMP_TARGET_EXIT_DATA
7860 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7861 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7862 OMP_CLAUSE_MAP);
7863 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7864 OMP_CLAUSE_DECL (c2)
7865 = unshare_expr (OMP_CLAUSE_DECL (c));
7866 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7867 OMP_CLAUSE_SIZE (c2)
7868 = TYPE_SIZE_UNIT (ptr_type_node);
7869 OMP_CLAUSE_CHAIN (l) = c2;
7870 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7872 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7873 tree c3
7874 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7875 OMP_CLAUSE_MAP);
7876 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7877 OMP_CLAUSE_DECL (c3)
7878 = unshare_expr (OMP_CLAUSE_DECL (c4));
7879 OMP_CLAUSE_SIZE (c3)
7880 = TYPE_SIZE_UNIT (ptr_type_node);
7881 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7882 OMP_CLAUSE_CHAIN (c2) = c3;
7884 *prev_list_p = l;
7885 prev_list_p = NULL;
7887 else
7889 OMP_CLAUSE_CHAIN (l) = c;
7890 *list_p = l;
7891 list_p = &OMP_CLAUSE_CHAIN (l);
7893 if (orig_base != base && code == OMP_TARGET)
7895 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7896 OMP_CLAUSE_MAP);
7897 enum gomp_map_kind mkind
7898 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7899 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7900 OMP_CLAUSE_DECL (c2) = decl;
7901 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7902 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7903 OMP_CLAUSE_CHAIN (l) = c2;
7905 flags = GOVD_MAP | GOVD_EXPLICIT;
7906 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7907 flags |= GOVD_SEEN;
7908 goto do_add_decl;
/* Base already has a GOMP_MAP_STRUCT: insert this component in
   offset order and bump the struct's sibling count.  */
7910 else
7912 tree *osc = struct_map_to_clause->get (decl);
7913 tree *sc = NULL, *scp = NULL;
7914 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7915 n->value |= GOVD_SEEN;
7916 offset_int o1, o2;
7917 if (offset)
7918 o1 = wi::to_offset (offset);
7919 else
7920 o1 = 0;
7921 if (bitpos)
7922 o1 = o1 + bitpos / BITS_PER_UNIT;
7923 sc = &OMP_CLAUSE_CHAIN (*osc);
7924 if (*sc != c
7925 && (OMP_CLAUSE_MAP_KIND (*sc)
7926 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7927 sc = &OMP_CLAUSE_CHAIN (*sc);
7928 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7929 if (ptr && sc == prev_list_p)
7930 break;
7931 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7932 != COMPONENT_REF
7933 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7934 != INDIRECT_REF)
7935 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7936 != ARRAY_REF))
7937 break;
7938 else
7940 tree offset2;
7941 HOST_WIDE_INT bitsize2, bitpos2;
7942 base = OMP_CLAUSE_DECL (*sc);
7943 if (TREE_CODE (base) == ARRAY_REF)
7945 while (TREE_CODE (base) == ARRAY_REF)
7946 base = TREE_OPERAND (base, 0);
7947 if (TREE_CODE (base) != COMPONENT_REF
7948 || (TREE_CODE (TREE_TYPE (base))
7949 != ARRAY_TYPE))
7950 break;
7952 else if (TREE_CODE (base) == INDIRECT_REF
7953 && (TREE_CODE (TREE_OPERAND (base, 0))
7954 == COMPONENT_REF)
7955 && (TREE_CODE (TREE_TYPE
7956 (TREE_OPERAND (base, 0)))
7957 == REFERENCE_TYPE))
7958 base = TREE_OPERAND (base, 0);
7959 base = get_inner_reference (base, &bitsize2,
7960 &bitpos2, &offset2,
7961 &mode, &unsignedp,
7962 &reversep, &volatilep);
7963 if ((TREE_CODE (base) == INDIRECT_REF
7964 || (TREE_CODE (base) == MEM_REF
7965 && integer_zerop (TREE_OPERAND (base,
7966 1))))
7967 && DECL_P (TREE_OPERAND (base, 0))
7968 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7969 0)))
7970 == REFERENCE_TYPE))
7971 base = TREE_OPERAND (base, 0);
7972 if (base != decl)
7973 break;
7974 if (scp)
7975 continue;
7976 gcc_assert (offset == NULL_TREE
7977 || TREE_CODE (offset) == INTEGER_CST);
7978 tree d1 = OMP_CLAUSE_DECL (*sc);
7979 tree d2 = OMP_CLAUSE_DECL (c);
7980 while (TREE_CODE (d1) == ARRAY_REF)
7981 d1 = TREE_OPERAND (d1, 0);
7982 while (TREE_CODE (d2) == ARRAY_REF)
7983 d2 = TREE_OPERAND (d2, 0);
7984 if (TREE_CODE (d1) == INDIRECT_REF)
7985 d1 = TREE_OPERAND (d1, 0);
7986 if (TREE_CODE (d2) == INDIRECT_REF)
7987 d2 = TREE_OPERAND (d2, 0);
7988 while (TREE_CODE (d1) == COMPONENT_REF)
7989 if (TREE_CODE (d2) == COMPONENT_REF
7990 && TREE_OPERAND (d1, 1)
7991 == TREE_OPERAND (d2, 1))
7993 d1 = TREE_OPERAND (d1, 0);
7994 d2 = TREE_OPERAND (d2, 0);
7996 else
7997 break;
7998 if (d1 == d2)
8000 error_at (OMP_CLAUSE_LOCATION (c),
8001 "%qE appears more than once in map "
8002 "clauses", OMP_CLAUSE_DECL (c));
8003 remove = true;
8004 break;
8006 if (offset2)
8007 o2 = wi::to_offset (offset2);
8008 else
8009 o2 = 0;
8010 if (bitpos2)
8011 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8012 if (wi::ltu_p (o1, o2)
8013 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8015 if (ptr)
8016 scp = sc;
8017 else
8018 break;
8021 if (remove)
8022 break;
8023 OMP_CLAUSE_SIZE (*osc)
8024 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8025 size_one_node);
8026 if (ptr)
8028 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8029 OMP_CLAUSE_MAP);
8030 tree cl = NULL_TREE;
8031 enum gomp_map_kind mkind
8032 = code == OMP_TARGET_EXIT_DATA
8033 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8034 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8035 OMP_CLAUSE_DECL (c2)
8036 = unshare_expr (OMP_CLAUSE_DECL (c));
8037 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8038 OMP_CLAUSE_SIZE (c2)
8039 = TYPE_SIZE_UNIT (ptr_type_node);
8040 cl = scp ? *prev_list_p : c2;
8041 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8043 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8044 tree c3
8045 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8046 OMP_CLAUSE_MAP);
8047 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8048 OMP_CLAUSE_DECL (c3)
8049 = unshare_expr (OMP_CLAUSE_DECL (c4));
8050 OMP_CLAUSE_SIZE (c3)
8051 = TYPE_SIZE_UNIT (ptr_type_node);
8052 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8053 if (!scp)
8054 OMP_CLAUSE_CHAIN (c2) = c3;
8055 else
8056 cl = c3;
8058 if (scp)
8059 *scp = c2;
8060 if (sc == prev_list_p)
8062 *sc = cl;
8063 prev_list_p = NULL;
8065 else
8067 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8068 list_p = prev_list_p;
8069 prev_list_p = NULL;
8070 OMP_CLAUSE_CHAIN (c) = *sc;
8071 *sc = cl;
8072 continue;
8075 else if (*sc != c)
8077 *list_p = OMP_CLAUSE_CHAIN (c);
8078 OMP_CLAUSE_CHAIN (c) = *sc;
8079 *sc = c;
8080 continue;
8084 if (!remove
8085 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8086 && OMP_CLAUSE_CHAIN (c)
8087 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8088 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8089 == GOMP_MAP_ALWAYS_POINTER))
8090 prev_list_p = list_p;
8091 break;
8093 flags = GOVD_MAP | GOVD_EXPLICIT;
8094 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8095 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8096 flags |= GOVD_MAP_ALWAYS_TO;
8097 goto do_add;
8099 case OMP_CLAUSE_DEPEND:
8100 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8102 tree deps = OMP_CLAUSE_DECL (c);
8103 while (deps && TREE_CODE (deps) == TREE_LIST)
8105 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8106 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8107 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8108 pre_p, NULL, is_gimple_val, fb_rvalue);
8109 deps = TREE_CHAIN (deps);
8111 break;
8113 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8114 break;
8115 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8117 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8118 NULL, is_gimple_val, fb_rvalue);
8119 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8121 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8123 remove = true;
8124 break;
/* depend clauses record the address of the dependence object.  */
8126 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8127 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8128 is_gimple_val, fb_rvalue) == GS_ERROR)
8130 remove = true;
8131 break;
8133 break;
8135 case OMP_CLAUSE_TO:
8136 case OMP_CLAUSE_FROM:
8137 case OMP_CLAUSE__CACHE_:
8138 decl = OMP_CLAUSE_DECL (c);
8139 if (error_operand_p (decl))
8141 remove = true;
8142 break;
8144 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8145 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8146 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8147 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8148 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8150 remove = true;
8151 break;
8153 if (!DECL_P (decl))
8155 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8156 NULL, is_gimple_lvalue, fb_lvalue)
8157 == GS_ERROR)
8159 remove = true;
8160 break;
8162 break;
8164 goto do_notice;
8166 case OMP_CLAUSE_USE_DEVICE_PTR:
8167 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8168 goto do_add;
8169 case OMP_CLAUSE_IS_DEVICE_PTR:
8170 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8171 goto do_add;
/* Shared entry point: record DECL with FLAGS in the new context and
   gimplify any reduction/lastprivate/linear helper statements.  */
8173 do_add:
8174 decl = OMP_CLAUSE_DECL (c);
8175 do_add_decl:
8176 if (error_operand_p (decl))
8178 remove = true;
8179 break;
8181 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8183 tree t = omp_member_access_dummy_var (decl);
8184 if (t)
8186 tree v = DECL_VALUE_EXPR (decl);
8187 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8188 if (outer_ctx)
8189 omp_notice_variable (outer_ctx, t, true);
8192 if (code == OACC_DATA
8193 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8194 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8195 flags |= GOVD_MAP_0LEN_ARRAY;
8196 omp_add_variable (ctx, decl, flags);
8197 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8198 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8200 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8201 GOVD_LOCAL | GOVD_SEEN);
8202 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8203 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8204 find_decl_expr,
8205 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8206 NULL) == NULL_TREE)
8207 omp_add_variable (ctx,
8208 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8209 GOVD_LOCAL | GOVD_SEEN)
8210 gimplify_omp_ctxp = ctx;
8211 push_gimplify_context ();
8213 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8214 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8216 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8217 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8218 pop_gimplify_context
8219 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8220 push_gimplify_context ();
8221 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8222 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8223 pop_gimplify_context
8224 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8225 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8226 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8228 gimplify_omp_ctxp = outer_ctx;
8230 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8231 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8233 gimplify_omp_ctxp = ctx;
8234 push_gimplify_context ();
8235 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8237 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8238 NULL, NULL);
8239 TREE_SIDE_EFFECTS (bind) = 1;
8240 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8241 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8243 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8244 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8245 pop_gimplify_context
8246 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8247 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8249 gimplify_omp_ctxp = outer_ctx;
8251 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8252 && OMP_CLAUSE_LINEAR_STMT (c))
8254 gimplify_omp_ctxp = ctx;
8255 push_gimplify_context ();
8256 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8258 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8259 NULL, NULL);
8260 TREE_SIDE_EFFECTS (bind) = 1;
8261 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8262 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8264 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8265 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8266 pop_gimplify_context
8267 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8268 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8270 gimplify_omp_ctxp = outer_ctx;
8272 if (notice_outer)
8273 goto do_notice;
8274 break;
8276 case OMP_CLAUSE_COPYIN:
8277 case OMP_CLAUSE_COPYPRIVATE:
8278 decl = OMP_CLAUSE_DECL (c);
8279 if (error_operand_p (decl))
8281 remove = true;
8282 break;
8284 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8285 && !remove
8286 && !omp_check_private (ctx, decl, true))
8288 remove = true;
8289 if (is_global_var (decl))
8291 if (DECL_THREAD_LOCAL_P (decl))
8292 remove = false;
8293 else if (DECL_HAS_VALUE_EXPR_P (decl))
8295 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8297 if (value
8298 && DECL_P (value)
8299 && DECL_THREAD_LOCAL_P (value))
8300 remove = false;
8303 if (remove)
8304 error_at (OMP_CLAUSE_LOCATION (c),
8305 "copyprivate variable %qE is not threadprivate"
8306 " or private in outer context", DECL_NAME (decl));
8308 do_notice:
8309 if (outer_ctx)
8310 omp_notice_variable (outer_ctx, decl, true);
8311 if (check_non_private
8312 && region_type == ORT_WORKSHARE
8313 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8314 || decl == OMP_CLAUSE_DECL (c)
8315 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8316 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8317 == ADDR_EXPR
8318 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8319 == POINTER_PLUS_EXPR
8320 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8321 (OMP_CLAUSE_DECL (c), 0), 0))
8322 == ADDR_EXPR)))))
8323 && omp_check_private (ctx, decl, false))
8325 error ("%s variable %qE is private in outer context",
8326 check_non_private, DECL_NAME (decl));
8327 remove = true;
8329 break;
8331 case OMP_CLAUSE_IF:
8332 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8333 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8335 const char *p[2];
8336 for (int i = 0; i < 2; i++)
8337 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8339 case OMP_PARALLEL: p[i] = "parallel"; break;
8340 case OMP_TASK: p[i] = "task"; break;
8341 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8342 case OMP_TARGET_DATA: p[i] = "target data"; break;
8343 case OMP_TARGET: p[i] = "target"; break;
8344 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8345 case OMP_TARGET_ENTER_DATA:
8346 p[i] = "target enter data"; break;
8347 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8348 default: gcc_unreachable ();
8350 error_at (OMP_CLAUSE_LOCATION (c),
8351 "expected %qs %<if%> clause modifier rather than %qs",
8352 p[0], p[1]);
8353 remove = true;
8355 /* Fall through. */
8357 case OMP_CLAUSE_FINAL:
8358 OMP_CLAUSE_OPERAND (c, 0)
8359 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8360 /* Fall through. */
8362 case OMP_CLAUSE_SCHEDULE:
8363 case OMP_CLAUSE_NUM_THREADS:
8364 case OMP_CLAUSE_NUM_TEAMS:
8365 case OMP_CLAUSE_THREAD_LIMIT:
8366 case OMP_CLAUSE_DIST_SCHEDULE:
8367 case OMP_CLAUSE_DEVICE:
8368 case OMP_CLAUSE_PRIORITY:
8369 case OMP_CLAUSE_GRAINSIZE:
8370 case OMP_CLAUSE_NUM_TASKS:
8371 case OMP_CLAUSE_HINT:
8372 case OMP_CLAUSE__CILK_FOR_COUNT_:
8373 case OMP_CLAUSE_ASYNC:
8374 case OMP_CLAUSE_WAIT:
8375 case OMP_CLAUSE_NUM_GANGS:
8376 case OMP_CLAUSE_NUM_WORKERS:
8377 case OMP_CLAUSE_VECTOR_LENGTH:
8378 case OMP_CLAUSE_WORKER:
8379 case OMP_CLAUSE_VECTOR:
8380 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8381 is_gimple_val, fb_rvalue) == GS_ERROR)
8382 remove = true;
8383 break;
8385 case OMP_CLAUSE_GANG:
8386 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8387 is_gimple_val, fb_rvalue) == GS_ERROR)
8388 remove = true;
8389 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8390 is_gimple_val, fb_rvalue) == GS_ERROR)
8391 remove = true;
8392 break;
8394 case OMP_CLAUSE_NOWAIT:
8395 case OMP_CLAUSE_ORDERED:
8396 case OMP_CLAUSE_UNTIED:
8397 case OMP_CLAUSE_COLLAPSE:
8398 case OMP_CLAUSE_TILE:
8399 case OMP_CLAUSE_AUTO:
8400 case OMP_CLAUSE_SEQ:
8401 case OMP_CLAUSE_INDEPENDENT:
8402 case OMP_CLAUSE_MERGEABLE:
8403 case OMP_CLAUSE_PROC_BIND:
8404 case OMP_CLAUSE_SAFELEN:
8405 case OMP_CLAUSE_SIMDLEN:
8406 case OMP_CLAUSE_NOGROUP:
8407 case OMP_CLAUSE_THREADS:
8408 case OMP_CLAUSE_SIMD:
8409 break;
8411 case OMP_CLAUSE_DEFAULTMAP:
8412 ctx->target_map_scalars_firstprivate = false;
8413 break;
8415 case OMP_CLAUSE_ALIGNED:
8416 decl = OMP_CLAUSE_DECL (c);
8417 if (error_operand_p (decl))
8419 remove = true;
8420 break;
8422 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8423 is_gimple_val, fb_rvalue) == GS_ERROR)
8425 remove = true;
8426 break;
8428 if (!is_global_var (decl)
8429 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8430 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8431 break;
8433 case OMP_CLAUSE_DEFAULT:
8434 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8435 break;
8437 default:
8438 gcc_unreachable ();
/* On OpenACC data constructs GOMP_MAP_FIRSTPRIVATE_POINTER maps are
   dropped; GOVD_MAP_0LEN_ARRAY was already ORed into the recorded
   flags above.  */
8441 if (code == OACC_DATA
8442 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8443 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8444 remove = true;
8445 if (remove)
8446 *list_p = OMP_CLAUSE_CHAIN (c);
8447 else
8448 list_p = &OMP_CLAUSE_CHAIN (c);
/* Install the new context and free the per-call struct map.  */
8451 gimplify_omp_ctxp = ctx;
8452 if (struct_map_to_clause)
8453 delete struct_map_to_clause;
8456 /* Return true if DECL is a candidate for shared to firstprivate
8457 optimization. We only consider non-addressable scalars, not
8458 too big, and not references. */
8460 static bool
8461 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8463 if (TREE_ADDRESSABLE (decl))
8464 return false;
8465 tree type = TREE_TYPE (decl);
8466 if (!is_gimple_reg_type (type)
8467 || TREE_CODE (type) == REFERENCE_TYPE
8468 || TREE_ADDRESSABLE (type))
8469 return false;
8470 /* Don't optimize too large decls, as each thread/task will have
8471 its own. */
8472 HOST_WIDE_INT len = int_size_in_bytes (type);
8473 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8474 return false;
8475 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8476 return false;
8477 return true;
8480 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8481 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8482 GOVD_WRITTEN in outer contexts. */
8484 static void
8485 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8487 for (; ctx; ctx = ctx->outer_context)
8489 splay_tree_node n = splay_tree_lookup (ctx->variables,
8490 (splay_tree_key) decl);
8491 if (n == NULL)
8492 continue;
8493 else if (n->value & GOVD_SHARED)
8495 n->value |= GOVD_WRITTEN;
8496 return;
8498 else if (n->value & GOVD_DATA_SHARE_CLASS)
8499 return;
8503 /* Helper callback for walk_gimple_seq to discover possible stores
8504 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8505 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8506 for those. */
8508 static tree
8509 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8511 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8513 *walk_subtrees = 0;
8514 if (!wi->is_lhs)
8515 return NULL_TREE;
8517 tree op = *tp;
8520 if (handled_component_p (op))
8521 op = TREE_OPERAND (op, 0);
8522 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8523 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8524 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8525 else
8526 break;
8528 while (1);
8529 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8530 return NULL_TREE;
8532 omp_mark_stores (gimplify_omp_ctxp, op);
8533 return NULL_TREE;
8536 /* Helper callback for walk_gimple_seq to discover possible stores
8537 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8538 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8539 for those. */
8541 static tree
8542 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8543 bool *handled_ops_p,
8544 struct walk_stmt_info *wi)
8546 gimple *stmt = gsi_stmt (*gsi_p);
8547 switch (gimple_code (stmt))
8549 /* Don't recurse on OpenMP constructs for which
8550 gimplify_adjust_omp_clauses already handled the bodies,
8551 except handle gimple_omp_for_pre_body. */
8552 case GIMPLE_OMP_FOR:
8553 *handled_ops_p = true;
8554 if (gimple_omp_for_pre_body (stmt))
8555 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8556 omp_find_stores_stmt, omp_find_stores_op, wi);
8557 break;
8558 case GIMPLE_OMP_PARALLEL:
8559 case GIMPLE_OMP_TASK:
8560 case GIMPLE_OMP_SECTIONS:
8561 case GIMPLE_OMP_SINGLE:
8562 case GIMPLE_OMP_TARGET:
8563 case GIMPLE_OMP_TEAMS:
8564 case GIMPLE_OMP_CRITICAL:
8565 *handled_ops_p = true;
8566 break;
8567 default:
8568 break;
8570 return NULL_TREE;
/* Payload handed through splay_tree_foreach to
   gimplify_adjust_omp_clauses_1.  */
8573 struct gimplify_adjust_omp_clauses_data
/* Head of the clause chain where implicit clauses are prepended.  */
8575   tree *list_p;
/* Sequence receiving statements emitted while gimplifying clause
   operands.  */
8576   gimple_seq *pre_p;
8579 /* For all variables that were not actually used within the context,
8580 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
/* Splay-tree callback: for each variable recorded in the current OMP
   context, translate its GOVD_* flags into the corresponding implicit
   clause and prepend that clause to DATA->list_p.  Returns 0 so the
   foreach walk continues over all variables.  */
8582 static int
8583 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8585 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8586 gimple_seq *pre_p
8587 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8588 tree decl = (tree) n->key;
8589 unsigned flags = n->value;
8590 enum omp_clause_code code;
8591 tree clause;
8592 bool private_debug;
/* Explicitly-specified or context-local variables need no implicit
   clause, and neither do variables never seen in the region.  */
8594 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8595 return 0;
8596 if ((flags & GOVD_SEEN) == 0)
8597 return 0;
/* Decide whether this becomes a debug-only PRIVATE clause.  */
8598 if (flags & GOVD_DEBUG_PRIVATE)
8600 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
8601 private_debug = true;
8603 else if (flags & GOVD_MAP)
8604 private_debug = false;
8605 else
8606 private_debug
8607 = lang_hooks.decls.omp_private_debug_clause (decl,
8608 !!(flags & GOVD_SHARED));
/* Translate the GOVD_* classification into a clause code.  */
8609 if (private_debug)
8610 code = OMP_CLAUSE_PRIVATE;
8611 else if (flags & GOVD_MAP)
8613 code = OMP_CLAUSE_MAP;
8614 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8615 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8617 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8618 return 0;
8621 else if (flags & GOVD_SHARED)
/* A shared global variable only needs an explicit SHARED clause when
   some enclosing context privatizes, maps, or reduces it.  */
8623 if (is_global_var (decl))
8625 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8626 while (ctx != NULL)
8628 splay_tree_node on
8629 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8630 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8631 | GOVD_PRIVATE | GOVD_REDUCTION
8632 | GOVD_LINEAR | GOVD_MAP)) != 0)
8633 break;
8634 ctx = ctx->outer_context;
8636 if (ctx == NULL)
8637 return 0;
8639 code = OMP_CLAUSE_SHARED;
8641 else if (flags & GOVD_PRIVATE)
8642 code = OMP_CLAUSE_PRIVATE;
8643 else if (flags & GOVD_FIRSTPRIVATE)
8645 code = OMP_CLAUSE_FIRSTPRIVATE;
8646 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8647 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8648 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8650 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8651 "%<target%> construct", decl);
8652 return 0;
8655 else if (flags & GOVD_LASTPRIVATE)
8656 code = OMP_CLAUSE_LASTPRIVATE;
/* ALIGNED clauses are handled elsewhere; nothing implicit to add.  */
8657 else if (flags & GOVD_ALIGNED)
8658 return 0;
8659 else
8660 gcc_unreachable ();
/* Lastprivate (or written shared) decls count as stores in the
   enclosing contexts for the shared-to-firstprivate optimization.  */
8662 if (((flags & GOVD_LASTPRIVATE)
8663 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8664 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8665 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
/* Build the implicit clause and link it at the head of the list.  */
8667 tree chain = *list_p;
8668 clause = build_omp_clause (input_location, code);
8669 OMP_CLAUSE_DECL (clause) = decl;
8670 OMP_CLAUSE_CHAIN (clause) = chain;
8671 if (private_debug)
8672 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8673 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8674 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8675 else if (code == OMP_CLAUSE_SHARED
8676 && (flags & GOVD_WRITTEN) == 0
8677 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8678 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8679 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8680 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
/* Possibly zero-length array section: emit a zero-sized GOMP_MAP_ALLOC
   of the pointed-to storage plus a GOMP_MAP_FIRSTPRIVATE_POINTER clause
   for the base pointer itself.  */
8681 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8683 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8684 OMP_CLAUSE_DECL (nc) = decl;
8685 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8686 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8687 OMP_CLAUSE_DECL (clause)
8688 = build_simple_mem_ref_loc (input_location, decl);
8689 OMP_CLAUSE_DECL (clause)
8690 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8691 build_int_cst (build_pointer_type (char_type_node), 0));
8692 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8693 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8694 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8695 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8696 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8697 OMP_CLAUSE_CHAIN (nc) = chain;
8698 OMP_CLAUSE_CHAIN (clause) = nc;
/* Gimplify the address operand in the enclosing context, then
   restore the current one.  */
8699 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8700 gimplify_omp_ctxp = ctx->outer_context;
8701 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8702 pre_p, NULL, is_gimple_val, fb_rvalue);
8703 gimplify_omp_ctxp = ctx;
8705 else if (code == OMP_CLAUSE_MAP)
8707 int kind = (flags & GOVD_MAP_TO_ONLY
8708 ? GOMP_MAP_TO
8709 : GOMP_MAP_TOFROM);
8710 if (flags & GOVD_MAP_FORCE)
8711 kind |= GOMP_MAP_FLAG_FORCE;
8712 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
/* Variable-sized decl: map the underlying storage through its
   DECL_VALUE_EXPR and add a pointer clause for the base.  */
8713 if (DECL_SIZE (decl)
8714 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8716 tree decl2 = DECL_VALUE_EXPR (decl);
8717 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8718 decl2 = TREE_OPERAND (decl2, 0);
8719 gcc_assert (DECL_P (decl2));
8720 tree mem = build_simple_mem_ref (decl2);
8721 OMP_CLAUSE_DECL (clause) = mem;
8722 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8723 if (gimplify_omp_ctxp->outer_context)
8725 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8726 omp_notice_variable (ctx, decl2, true);
8727 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8729 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8730 OMP_CLAUSE_MAP);
8731 OMP_CLAUSE_DECL (nc) = decl;
8732 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8733 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8734 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8735 else
8736 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8737 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8738 OMP_CLAUSE_CHAIN (clause) = nc;
/* Reference-typed decl: map the referenced object and add a
   GOMP_MAP_FIRSTPRIVATE_REFERENCE clause for the reference.  */
8740 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8741 && lang_hooks.decls.omp_privatize_by_reference (decl))
8743 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8744 OMP_CLAUSE_SIZE (clause)
8745 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8746 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8747 gimplify_omp_ctxp = ctx->outer_context;
8748 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8749 pre_p, NULL, is_gimple_val, fb_rvalue);
8750 gimplify_omp_ctxp = ctx;
8751 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8752 OMP_CLAUSE_MAP);
8753 OMP_CLAUSE_DECL (nc) = decl;
8754 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8755 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8756 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8757 OMP_CLAUSE_CHAIN (clause) = nc;
8759 else
8760 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
/* A decl that is both firstprivate and lastprivate gets a paired
   LASTPRIVATE clause marked as firstprivate.  */
8762 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8764 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8765 OMP_CLAUSE_DECL (nc) = decl;
8766 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8767 OMP_CLAUSE_CHAIN (nc) = chain;
8768 OMP_CLAUSE_CHAIN (clause) = nc;
8769 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8770 gimplify_omp_ctxp = ctx->outer_context;
8771 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8772 gimplify_omp_ctxp = ctx;
/* Let the frontend finalize the new clause(s) outside this context,
   then notice DECL-valued map sizes in the enclosing context.  */
8774 *list_p = clause;
8775 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8776 gimplify_omp_ctxp = ctx->outer_context;
8777 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8778 if (gimplify_omp_ctxp)
8779 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8780 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8781 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8782 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8783 true);
8784 gimplify_omp_ctxp = ctx;
8785 return 0;
/* Post-gimplification pass over the clause list *LIST_P of an OMP/OACC
   construct of kind CODE whose gimplified body is BODY.  Removes clauses
   for variables that turned out to be unused, fixes up remaining clauses
   (e.g. variable-sized and reference decls), and appends implicit
   data-sharing clauses via gimplify_adjust_omp_clauses_1.  Pops and
   deletes the current gimplify_omp_ctxp on the way out.  */
8788 static void
8789 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8790 enum tree_code code)
8792 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8793 tree c, decl;
/* If some enclosing context is a parallel/task/teams region, scan the
   gimplified body for stores into optimizable shared decls so they get
   GOVD_WRITTEN set there.  */
8795 if (body)
8797 struct gimplify_omp_ctx *octx;
8798 for (octx = ctx; octx; octx = octx->outer_context)
8799 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8800 break;
8801 if (octx)
8803 struct walk_stmt_info wi;
8804 memset (&wi, 0, sizeof (wi));
8805 walk_gimple_seq (body, omp_find_stores_stmt,
8806 omp_find_stores_op, &wi);
/* Walk the explicit clause list, pruning clauses for unused variables
   and fixing up the ones that remain.  */
8809 while ((c = *list_p) != NULL)
8811 splay_tree_node n;
8812 bool remove = false;
8814 switch (OMP_CLAUSE_CODE (c))
8816 case OMP_CLAUSE_FIRSTPRIVATE:
8817 if ((ctx->region_type & ORT_TARGET)
8818 && (ctx->region_type & ORT_ACC) == 0
8819 && TYPE_ATOMIC (strip_array_types
8820 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8822 error_at (OMP_CLAUSE_LOCATION (c),
8823 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8824 "%<target%> construct", OMP_CLAUSE_DECL (c));
8825 remove = true;
8826 break;
8828 /* FALLTHRU */
8829 case OMP_CLAUSE_PRIVATE:
8830 case OMP_CLAUSE_SHARED:
8831 case OMP_CLAUSE_LINEAR:
8832 decl = OMP_CLAUSE_DECL (c);
8833 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8834 remove = !(n->value & GOVD_SEEN);
8835 if (! remove)
8837 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8838 if ((n->value & GOVD_DEBUG_PRIVATE)
8839 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8841 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8842 || ((n->value & GOVD_DATA_SHARE_CLASS)
8843 == GOVD_PRIVATE));
8844 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8845 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8847 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8848 && (n->value & GOVD_WRITTEN) == 0
8849 && DECL_P (decl)
8850 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8851 OMP_CLAUSE_SHARED_READONLY (c) = 1;
/* NOTE(review): GOVD_WRITTEN is a multi-bit flag mask, so the test
   "(n->value & GOVD_WRITTEN) != 1" below is true whether or not the
   flag is set; "!= 0" looks intended -- confirm against upstream.  */
8852 else if (DECL_P (decl)
8853 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8854 && (n->value & GOVD_WRITTEN) != 1)
8855 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8856 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8857 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8858 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8860 break;
8862 case OMP_CLAUSE_LASTPRIVATE:
8863 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8864 accurately reflect the presence of a FIRSTPRIVATE clause. */
8865 decl = OMP_CLAUSE_DECL (c);
8866 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8867 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8868 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8869 if (code == OMP_DISTRIBUTE
8870 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8872 remove = true;
8873 error_at (OMP_CLAUSE_LOCATION (c),
8874 "same variable used in %<firstprivate%> and "
8875 "%<lastprivate%> clauses on %<distribute%> "
8876 "construct");
8878 if (!remove
8879 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8880 && DECL_P (decl)
8881 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8882 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8883 break;
8885 case OMP_CLAUSE_ALIGNED:
8886 decl = OMP_CLAUSE_DECL (c);
8887 if (!is_global_var (decl))
8889 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8890 remove = n == NULL || !(n->value & GOVD_SEEN);
8891 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8893 struct gimplify_omp_ctx *octx;
8894 if (n != NULL
8895 && (n->value & (GOVD_DATA_SHARE_CLASS
8896 & ~GOVD_FIRSTPRIVATE)))
8897 remove = true;
8898 else
8899 for (octx = ctx->outer_context; octx;
8900 octx = octx->outer_context)
8902 n = splay_tree_lookup (octx->variables,
8903 (splay_tree_key) decl);
8904 if (n == NULL)
8905 continue;
8906 if (n->value & GOVD_LOCAL)
8907 break;
8908 /* We have to avoid assigning a shared variable
8909 to itself when trying to add
8910 __builtin_assume_aligned. */
8911 if (n->value & GOVD_SHARED)
8913 remove = true;
8914 break;
8919 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8921 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8922 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8923 remove = true;
8925 break;
8927 case OMP_CLAUSE_MAP:
8928 if (code == OMP_TARGET_EXIT_DATA
8929 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8931 remove = true;
8932 break;
8934 decl = OMP_CLAUSE_DECL (c);
8935 /* Data clauses associated with acc parallel reductions must be
8936 compatible with present_or_copy. Warn and adjust the clause
8937 if that is not the case. */
8938 if (ctx->region_type == ORT_ACC_PARALLEL)
8940 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8941 n = NULL;
8943 if (DECL_P (t))
8944 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8946 if (n && (n->value & GOVD_REDUCTION))
8948 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8950 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8951 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8952 && kind != GOMP_MAP_FORCE_PRESENT
8953 && kind != GOMP_MAP_POINTER)
8955 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8956 "incompatible data clause with reduction "
8957 "on %qE; promoting to present_or_copy",
8958 DECL_NAME (t));
8959 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
8963 if (!DECL_P (decl))
8965 if ((ctx->region_type & ORT_TARGET) != 0
8966 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8968 if (TREE_CODE (decl) == INDIRECT_REF
8969 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8970 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8971 == REFERENCE_TYPE))
8972 decl = TREE_OPERAND (decl, 0);
8973 if (TREE_CODE (decl) == COMPONENT_REF)
8975 while (TREE_CODE (decl) == COMPONENT_REF)
8976 decl = TREE_OPERAND (decl, 0);
8977 if (DECL_P (decl))
8979 n = splay_tree_lookup (ctx->variables,
8980 (splay_tree_key) decl);
8981 if (!(n->value & GOVD_SEEN))
8982 remove = true;
8986 break;
8988 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8989 if ((ctx->region_type & ORT_TARGET) != 0
8990 && !(n->value & GOVD_SEEN)
8991 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
8992 && (!is_global_var (decl)
8993 || !lookup_attribute ("omp declare target link",
8994 DECL_ATTRIBUTES (decl))))
8996 remove = true;
8997 /* For struct element mapping, if struct is never referenced
8998 in target block and none of the mapping has always modifier,
8999 remove all the struct element mappings, which immediately
9000 follow the GOMP_MAP_STRUCT map clause. */
9001 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9003 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9004 while (cnt--)
9005 OMP_CLAUSE_CHAIN (c)
9006 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9009 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9010 && code == OMP_TARGET_EXIT_DATA)
9011 remove = true;
/* Variable-sized decl: rewrite the map to go through the storage
   named by its DECL_VALUE_EXPR and add a pointer clause.  */
9012 else if (DECL_SIZE (decl)
9013 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9014 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9015 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9016 && (OMP_CLAUSE_MAP_KIND (c)
9017 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9019 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9020 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9021 INTEGER_CST. */
9022 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9024 tree decl2 = DECL_VALUE_EXPR (decl);
9025 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9026 decl2 = TREE_OPERAND (decl2, 0);
9027 gcc_assert (DECL_P (decl2));
9028 tree mem = build_simple_mem_ref (decl2);
9029 OMP_CLAUSE_DECL (c) = mem;
9030 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9031 if (ctx->outer_context)
9033 omp_notice_variable (ctx->outer_context, decl2, true);
9034 omp_notice_variable (ctx->outer_context,
9035 OMP_CLAUSE_SIZE (c), true);
9037 if (((ctx->region_type & ORT_TARGET) != 0
9038 || !ctx->target_firstprivatize_array_bases)
9039 && ((n->value & GOVD_SEEN) == 0
9040 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9042 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9043 OMP_CLAUSE_MAP);
9044 OMP_CLAUSE_DECL (nc) = decl;
9045 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9046 if (ctx->target_firstprivatize_array_bases)
9047 OMP_CLAUSE_SET_MAP_KIND (nc,
9048 GOMP_MAP_FIRSTPRIVATE_POINTER);
9049 else
9050 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9051 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9052 OMP_CLAUSE_CHAIN (c) = nc;
9053 c = nc;
9056 else
9058 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9059 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9060 gcc_assert ((n->value & GOVD_SEEN) == 0
9061 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9062 == 0));
9064 break;
9066 case OMP_CLAUSE_TO:
9067 case OMP_CLAUSE_FROM:
9068 case OMP_CLAUSE__CACHE_:
9069 decl = OMP_CLAUSE_DECL (c);
9070 if (!DECL_P (decl))
9071 break;
9072 if (DECL_SIZE (decl)
9073 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9075 tree decl2 = DECL_VALUE_EXPR (decl);
9076 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9077 decl2 = TREE_OPERAND (decl2, 0);
9078 gcc_assert (DECL_P (decl2));
9079 tree mem = build_simple_mem_ref (decl2);
9080 OMP_CLAUSE_DECL (c) = mem;
9081 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9082 if (ctx->outer_context)
9084 omp_notice_variable (ctx->outer_context, decl2, true);
9085 omp_notice_variable (ctx->outer_context,
9086 OMP_CLAUSE_SIZE (c), true);
9089 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9090 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9091 break;
9093 case OMP_CLAUSE_REDUCTION:
9094 decl = OMP_CLAUSE_DECL (c);
9095 /* OpenACC reductions need a present_or_copy data clause.
9096 Add one if necessary. Error if the reduction is private. */
9097 if (ctx->region_type == ORT_ACC_PARALLEL)
9099 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9100 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9101 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9102 "reduction on %qE", DECL_NAME (decl));
9103 else if ((n->value & GOVD_MAP) == 0)
9105 tree next = OMP_CLAUSE_CHAIN (c);
9106 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9107 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9108 OMP_CLAUSE_DECL (nc) = decl;
9109 OMP_CLAUSE_CHAIN (c) = nc;
9110 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9111 while (1)
9113 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9114 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9115 break;
9116 nc = OMP_CLAUSE_CHAIN (nc);
9118 OMP_CLAUSE_CHAIN (nc) = next;
9119 n->value |= GOVD_MAP;
9122 if (DECL_P (decl)
9123 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9124 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9125 break;
/* These clause kinds need no adjustment after gimplification.  */
9126 case OMP_CLAUSE_COPYIN:
9127 case OMP_CLAUSE_COPYPRIVATE:
9128 case OMP_CLAUSE_IF:
9129 case OMP_CLAUSE_NUM_THREADS:
9130 case OMP_CLAUSE_NUM_TEAMS:
9131 case OMP_CLAUSE_THREAD_LIMIT:
9132 case OMP_CLAUSE_DIST_SCHEDULE:
9133 case OMP_CLAUSE_DEVICE:
9134 case OMP_CLAUSE_SCHEDULE:
9135 case OMP_CLAUSE_NOWAIT:
9136 case OMP_CLAUSE_ORDERED:
9137 case OMP_CLAUSE_DEFAULT:
9138 case OMP_CLAUSE_UNTIED:
9139 case OMP_CLAUSE_COLLAPSE:
9140 case OMP_CLAUSE_FINAL:
9141 case OMP_CLAUSE_MERGEABLE:
9142 case OMP_CLAUSE_PROC_BIND:
9143 case OMP_CLAUSE_SAFELEN:
9144 case OMP_CLAUSE_SIMDLEN:
9145 case OMP_CLAUSE_DEPEND:
9146 case OMP_CLAUSE_PRIORITY:
9147 case OMP_CLAUSE_GRAINSIZE:
9148 case OMP_CLAUSE_NUM_TASKS:
9149 case OMP_CLAUSE_NOGROUP:
9150 case OMP_CLAUSE_THREADS:
9151 case OMP_CLAUSE_SIMD:
9152 case OMP_CLAUSE_HINT:
9153 case OMP_CLAUSE_DEFAULTMAP:
9154 case OMP_CLAUSE_USE_DEVICE_PTR:
9155 case OMP_CLAUSE_IS_DEVICE_PTR:
9156 case OMP_CLAUSE__CILK_FOR_COUNT_:
9157 case OMP_CLAUSE_ASYNC:
9158 case OMP_CLAUSE_WAIT:
9159 case OMP_CLAUSE_INDEPENDENT:
9160 case OMP_CLAUSE_NUM_GANGS:
9161 case OMP_CLAUSE_NUM_WORKERS:
9162 case OMP_CLAUSE_VECTOR_LENGTH:
9163 case OMP_CLAUSE_GANG:
9164 case OMP_CLAUSE_WORKER:
9165 case OMP_CLAUSE_VECTOR:
9166 case OMP_CLAUSE_AUTO:
9167 case OMP_CLAUSE_SEQ:
9168 case OMP_CLAUSE_TILE:
9169 break;
9171 default:
9172 gcc_unreachable ();
9175 if (remove)
9176 *list_p = OMP_CLAUSE_CHAIN (c);
9177 else
9178 list_p = &OMP_CLAUSE_CHAIN (c);
9181 /* Add in any implicit data sharing. */
9182 struct gimplify_adjust_omp_clauses_data data;
9183 data.list_p = list_p;
9184 data.pre_p = pre_p;
9185 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
/* This context is finished: pop it and free its resources.  */
9187 gimplify_omp_ctxp = ctx->outer_context;
9188 delete_omp_context (ctx);
9191 /* Gimplify OACC_CACHE. */
9193 static void
9194 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9196 tree expr = *expr_p;
9198 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9199 OACC_CACHE);
9200 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9201 OACC_CACHE);
9203 /* TODO: Do something sensible with this information. */
9205 *expr_p = NULL_TREE;
9208 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9209 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9210 kind. The entry kind will replace the one in CLAUSE, while the exit
9211 kind will be used in a new omp_clause and returned to the caller. */
9213 static tree
9214 gimplify_oacc_declare_1 (tree clause)
9216 HOST_WIDE_INT kind, new_op;
9217 bool ret = false;
9218 tree c = NULL;
9220 kind = OMP_CLAUSE_MAP_KIND (clause);
9222 switch (kind)
9224 case GOMP_MAP_ALLOC:
9225 case GOMP_MAP_FORCE_ALLOC:
9226 case GOMP_MAP_FORCE_TO:
9227 new_op = GOMP_MAP_DELETE;
9228 ret = true;
9229 break;
9231 case GOMP_MAP_FORCE_FROM:
9232 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9233 new_op = GOMP_MAP_FORCE_FROM;
9234 ret = true;
9235 break;
9237 case GOMP_MAP_FORCE_TOFROM:
9238 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9239 new_op = GOMP_MAP_FORCE_FROM;
9240 ret = true;
9241 break;
9243 case GOMP_MAP_FROM:
9244 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9245 new_op = GOMP_MAP_FROM;
9246 ret = true;
9247 break;
9249 case GOMP_MAP_TOFROM:
9250 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9251 new_op = GOMP_MAP_FROM;
9252 ret = true;
9253 break;
9255 case GOMP_MAP_DEVICE_RESIDENT:
9256 case GOMP_MAP_FORCE_DEVICEPTR:
9257 case GOMP_MAP_FORCE_PRESENT:
9258 case GOMP_MAP_LINK:
9259 case GOMP_MAP_POINTER:
9260 case GOMP_MAP_TO:
9261 break;
9263 default:
9264 gcc_unreachable ();
9265 break;
9268 if (ret)
9270 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9271 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9272 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9275 return c;
9278 /* Gimplify OACC_DECLARE. */
9280 static void
9281 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9283 tree expr = *expr_p;
9284 gomp_target *stmt;
9285 tree clauses, t, decl;
9287 clauses = OACC_DECLARE_CLAUSES (expr);
9289 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9290 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9292 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9294 decl = OMP_CLAUSE_DECL (t);
9296 if (TREE_CODE (decl) == MEM_REF)
9297 decl = TREE_OPERAND (decl, 0);
9299 if (VAR_P (decl) && !is_oacc_declared (decl))
9301 tree attr = get_identifier ("oacc declare target");
9302 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9303 DECL_ATTRIBUTES (decl));
9306 if (VAR_P (decl)
9307 && !is_global_var (decl)
9308 && DECL_CONTEXT (decl) == current_function_decl)
9310 tree c = gimplify_oacc_declare_1 (t);
9311 if (c)
9313 if (oacc_declare_returns == NULL)
9314 oacc_declare_returns = new hash_map<tree, tree>;
9316 oacc_declare_returns->put (decl, c);
9320 if (gimplify_omp_ctxp)
9321 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9324 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9325 clauses);
9327 gimplify_seq_add_stmt (pre_p, stmt);
9329 *expr_p = NULL_TREE;
9332 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9333 gimplification of the body, as well as scanning the body for used
9334 variables. We need to do this scan now, because variable-sized
9335 decls will be decomposed during gimplification. */
9337 static void
9338 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9340 tree expr = *expr_p;
9341 gimple *g;
9342 gimple_seq body = NULL;
9344 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9345 OMP_PARALLEL_COMBINED (expr)
9346 ? ORT_COMBINED_PARALLEL
9347 : ORT_PARALLEL, OMP_PARALLEL);
9349 push_gimplify_context ();
9351 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9352 if (gimple_code (g) == GIMPLE_BIND)
9353 pop_gimplify_context (g);
9354 else
9355 pop_gimplify_context (NULL);
9357 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9358 OMP_PARALLEL);
9360 g = gimple_build_omp_parallel (body,
9361 OMP_PARALLEL_CLAUSES (expr),
9362 NULL_TREE, NULL_TREE);
9363 if (OMP_PARALLEL_COMBINED (expr))
9364 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9365 gimplify_seq_add_stmt (pre_p, g);
9366 *expr_p = NULL_TREE;
9369 /* Gimplify the contents of an OMP_TASK statement. This involves
9370 gimplification of the body, as well as scanning the body for used
9371 variables. We need to do this scan now, because variable-sized
9372 decls will be decomposed during gimplification. */
9374 static void
9375 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9377 tree expr = *expr_p;
9378 gimple *g;
9379 gimple_seq body = NULL;
9381 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9382 omp_find_clause (OMP_TASK_CLAUSES (expr),
9383 OMP_CLAUSE_UNTIED)
9384 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9386 push_gimplify_context ();
9388 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9389 if (gimple_code (g) == GIMPLE_BIND)
9390 pop_gimplify_context (g);
9391 else
9392 pop_gimplify_context (NULL);
9394 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9395 OMP_TASK);
9397 g = gimple_build_omp_task (body,
9398 OMP_TASK_CLAUSES (expr),
9399 NULL_TREE, NULL_TREE,
9400 NULL_TREE, NULL_TREE, NULL_TREE);
9401 gimplify_seq_add_stmt (pre_p, g);
9402 *expr_p = NULL_TREE;
9405 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9406 with non-NULL OMP_FOR_INIT. */
9408 static tree
9409 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9411 *walk_subtrees = 0;
9412 switch (TREE_CODE (*tp))
9414 case OMP_FOR:
9415 *walk_subtrees = 1;
9416 /* FALLTHRU */
9417 case OMP_SIMD:
9418 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9419 return *tp;
9420 break;
9421 case BIND_EXPR:
9422 case STATEMENT_LIST:
9423 case OMP_PARALLEL:
9424 *walk_subtrees = 1;
9425 break;
9426 default:
9427 break;
9429 return NULL_TREE;
9432 /* Gimplify the gross structure of an OMP_FOR statement. */
9434 static enum gimplify_status
9435 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9437 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9438 enum gimplify_status ret = GS_ALL_DONE;
9439 enum gimplify_status tret;
9440 gomp_for *gfor;
9441 gimple_seq for_body, for_pre_body;
9442 int i;
9443 bitmap has_decl_expr = NULL;
9444 enum omp_region_type ort = ORT_WORKSHARE;
9446 orig_for_stmt = for_stmt = *expr_p;
9448 switch (TREE_CODE (for_stmt))
9450 case OMP_FOR:
9451 case CILK_FOR:
9452 case OMP_DISTRIBUTE:
9453 break;
9454 case OACC_LOOP:
9455 ort = ORT_ACC;
9456 break;
9457 case OMP_TASKLOOP:
9458 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9459 ort = ORT_UNTIED_TASK;
9460 else
9461 ort = ORT_TASK;
9462 break;
9463 case OMP_SIMD:
9464 case CILK_SIMD:
9465 ort = ORT_SIMD;
9466 break;
9467 default:
9468 gcc_unreachable ();
9471 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9472 clause for the IV. */
9473 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9475 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9476 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9477 decl = TREE_OPERAND (t, 0);
9478 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9479 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9480 && OMP_CLAUSE_DECL (c) == decl)
9482 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9483 break;
9487 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9489 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9490 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9491 find_combined_omp_for, NULL, NULL);
9492 if (inner_for_stmt == NULL_TREE)
9494 gcc_assert (seen_error ());
9495 *expr_p = NULL_TREE;
9496 return GS_ERROR;
9500 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9501 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9502 TREE_CODE (for_stmt));
9504 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9505 gimplify_omp_ctxp->distribute = true;
9507 /* Handle OMP_FOR_INIT. */
9508 for_pre_body = NULL;
9509 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9511 has_decl_expr = BITMAP_ALLOC (NULL);
9512 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9513 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9514 == VAR_DECL)
9516 t = OMP_FOR_PRE_BODY (for_stmt);
9517 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9519 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9521 tree_stmt_iterator si;
9522 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9523 tsi_next (&si))
9525 t = tsi_stmt (si);
9526 if (TREE_CODE (t) == DECL_EXPR
9527 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9528 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9532 if (OMP_FOR_PRE_BODY (for_stmt))
9534 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9535 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9536 else
9538 struct gimplify_omp_ctx ctx;
9539 memset (&ctx, 0, sizeof (ctx));
9540 ctx.region_type = ORT_NONE;
9541 gimplify_omp_ctxp = &ctx;
9542 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9543 gimplify_omp_ctxp = NULL;
9546 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9548 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9549 for_stmt = inner_for_stmt;
9551 /* For taskloop, need to gimplify the start, end and step before the
9552 taskloop, outside of the taskloop omp context. */
9553 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9555 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9557 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9558 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9560 TREE_OPERAND (t, 1)
9561 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9562 pre_p, NULL, false);
9563 tree c = build_omp_clause (input_location,
9564 OMP_CLAUSE_FIRSTPRIVATE);
9565 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9566 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9567 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9570 /* Handle OMP_FOR_COND. */
9571 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9572 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9574 TREE_OPERAND (t, 1)
9575 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9576 gimple_seq_empty_p (for_pre_body)
9577 ? pre_p : &for_pre_body, NULL,
9578 false);
9579 tree c = build_omp_clause (input_location,
9580 OMP_CLAUSE_FIRSTPRIVATE);
9581 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9582 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9583 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9586 /* Handle OMP_FOR_INCR. */
9587 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9588 if (TREE_CODE (t) == MODIFY_EXPR)
9590 decl = TREE_OPERAND (t, 0);
9591 t = TREE_OPERAND (t, 1);
9592 tree *tp = &TREE_OPERAND (t, 1);
9593 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9594 tp = &TREE_OPERAND (t, 0);
9596 if (!is_gimple_constant (*tp))
9598 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9599 ? pre_p : &for_pre_body;
9600 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9601 tree c = build_omp_clause (input_location,
9602 OMP_CLAUSE_FIRSTPRIVATE);
9603 OMP_CLAUSE_DECL (c) = *tp;
9604 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9605 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9610 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9611 OMP_TASKLOOP);
9614 if (orig_for_stmt != for_stmt)
9615 gimplify_omp_ctxp->combined_loop = true;
9617 for_body = NULL;
9618 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9619 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9620 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9621 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9623 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9624 bool is_doacross = false;
9625 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9627 is_doacross = true;
9628 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9629 (OMP_FOR_INIT (for_stmt))
9630 * 2);
9632 int collapse = 1, tile = 0;
9633 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9634 if (c)
9635 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9636 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9637 if (c)
9638 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9639 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9641 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9642 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9643 decl = TREE_OPERAND (t, 0);
9644 gcc_assert (DECL_P (decl));
9645 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9646 || POINTER_TYPE_P (TREE_TYPE (decl)));
9647 if (is_doacross)
9649 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9650 gimplify_omp_ctxp->loop_iter_var.quick_push
9651 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9652 else
9653 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9654 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9657 /* Make sure the iteration variable is private. */
9658 tree c = NULL_TREE;
9659 tree c2 = NULL_TREE;
9660 if (orig_for_stmt != for_stmt)
9661 /* Do this only on innermost construct for combined ones. */;
9662 else if (ort == ORT_SIMD)
9664 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9665 (splay_tree_key) decl);
9666 omp_is_private (gimplify_omp_ctxp, decl,
9667 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9668 != 1));
9669 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9670 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9671 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9673 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9674 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9675 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9676 if (has_decl_expr
9677 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9679 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9680 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9682 struct gimplify_omp_ctx *outer
9683 = gimplify_omp_ctxp->outer_context;
9684 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9686 if (outer->region_type == ORT_WORKSHARE
9687 && outer->combined_loop)
9689 n = splay_tree_lookup (outer->variables,
9690 (splay_tree_key)decl);
9691 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9693 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9694 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9696 else
9698 struct gimplify_omp_ctx *octx = outer->outer_context;
9699 if (octx
9700 && octx->region_type == ORT_COMBINED_PARALLEL
9701 && octx->outer_context
9702 && (octx->outer_context->region_type
9703 == ORT_WORKSHARE)
9704 && octx->outer_context->combined_loop)
9706 octx = octx->outer_context;
9707 n = splay_tree_lookup (octx->variables,
9708 (splay_tree_key)decl);
9709 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9711 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9712 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9719 OMP_CLAUSE_DECL (c) = decl;
9720 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9721 OMP_FOR_CLAUSES (for_stmt) = c;
9722 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9723 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9725 if (outer->region_type == ORT_WORKSHARE
9726 && outer->combined_loop)
9728 if (outer->outer_context
9729 && (outer->outer_context->region_type
9730 == ORT_COMBINED_PARALLEL))
9731 outer = outer->outer_context;
9732 else if (omp_check_private (outer, decl, false))
9733 outer = NULL;
9735 else if (((outer->region_type & ORT_TASK) != 0)
9736 && outer->combined_loop
9737 && !omp_check_private (gimplify_omp_ctxp,
9738 decl, false))
9740 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9742 omp_notice_variable (outer, decl, true);
9743 outer = NULL;
9745 if (outer)
9747 n = splay_tree_lookup (outer->variables,
9748 (splay_tree_key)decl);
9749 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9751 omp_add_variable (outer, decl,
9752 GOVD_LASTPRIVATE | GOVD_SEEN);
9753 if (outer->region_type == ORT_COMBINED_PARALLEL
9754 && outer->outer_context
9755 && (outer->outer_context->region_type
9756 == ORT_WORKSHARE)
9757 && outer->outer_context->combined_loop)
9759 outer = outer->outer_context;
9760 n = splay_tree_lookup (outer->variables,
9761 (splay_tree_key)decl);
9762 if (omp_check_private (outer, decl, false))
9763 outer = NULL;
9764 else if (n == NULL
9765 || ((n->value & GOVD_DATA_SHARE_CLASS)
9766 == 0))
9767 omp_add_variable (outer, decl,
9768 GOVD_LASTPRIVATE
9769 | GOVD_SEEN);
9770 else
9771 outer = NULL;
9773 if (outer && outer->outer_context
9774 && (outer->outer_context->region_type
9775 == ORT_COMBINED_TEAMS))
9777 outer = outer->outer_context;
9778 n = splay_tree_lookup (outer->variables,
9779 (splay_tree_key)decl);
9780 if (n == NULL
9781 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9782 omp_add_variable (outer, decl,
9783 GOVD_SHARED | GOVD_SEEN);
9784 else
9785 outer = NULL;
9787 if (outer && outer->outer_context)
9788 omp_notice_variable (outer->outer_context, decl,
9789 true);
9794 else
9796 bool lastprivate
9797 = (!has_decl_expr
9798 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9799 struct gimplify_omp_ctx *outer
9800 = gimplify_omp_ctxp->outer_context;
9801 if (outer && lastprivate)
9803 if (outer->region_type == ORT_WORKSHARE
9804 && outer->combined_loop)
9806 n = splay_tree_lookup (outer->variables,
9807 (splay_tree_key)decl);
9808 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9810 lastprivate = false;
9811 outer = NULL;
9813 else if (outer->outer_context
9814 && (outer->outer_context->region_type
9815 == ORT_COMBINED_PARALLEL))
9816 outer = outer->outer_context;
9817 else if (omp_check_private (outer, decl, false))
9818 outer = NULL;
9820 else if (((outer->region_type & ORT_TASK) != 0)
9821 && outer->combined_loop
9822 && !omp_check_private (gimplify_omp_ctxp,
9823 decl, false))
9825 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9827 omp_notice_variable (outer, decl, true);
9828 outer = NULL;
9830 if (outer)
9832 n = splay_tree_lookup (outer->variables,
9833 (splay_tree_key)decl);
9834 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9836 omp_add_variable (outer, decl,
9837 GOVD_LASTPRIVATE | GOVD_SEEN);
9838 if (outer->region_type == ORT_COMBINED_PARALLEL
9839 && outer->outer_context
9840 && (outer->outer_context->region_type
9841 == ORT_WORKSHARE)
9842 && outer->outer_context->combined_loop)
9844 outer = outer->outer_context;
9845 n = splay_tree_lookup (outer->variables,
9846 (splay_tree_key)decl);
9847 if (omp_check_private (outer, decl, false))
9848 outer = NULL;
9849 else if (n == NULL
9850 || ((n->value & GOVD_DATA_SHARE_CLASS)
9851 == 0))
9852 omp_add_variable (outer, decl,
9853 GOVD_LASTPRIVATE
9854 | GOVD_SEEN);
9855 else
9856 outer = NULL;
9858 if (outer && outer->outer_context
9859 && (outer->outer_context->region_type
9860 == ORT_COMBINED_TEAMS))
9862 outer = outer->outer_context;
9863 n = splay_tree_lookup (outer->variables,
9864 (splay_tree_key)decl);
9865 if (n == NULL
9866 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9867 omp_add_variable (outer, decl,
9868 GOVD_SHARED | GOVD_SEEN);
9869 else
9870 outer = NULL;
9872 if (outer && outer->outer_context)
9873 omp_notice_variable (outer->outer_context, decl,
9874 true);
9879 c = build_omp_clause (input_location,
9880 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9881 : OMP_CLAUSE_PRIVATE);
9882 OMP_CLAUSE_DECL (c) = decl;
9883 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9884 OMP_FOR_CLAUSES (for_stmt) = c;
9885 omp_add_variable (gimplify_omp_ctxp, decl,
9886 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9887 | GOVD_EXPLICIT | GOVD_SEEN);
9888 c = NULL_TREE;
9891 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9892 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9893 else
9894 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9896 /* If DECL is not a gimple register, create a temporary variable to act
9897 as an iteration counter. This is valid, since DECL cannot be
9898 modified in the body of the loop. Similarly for any iteration vars
9899 in simd with collapse > 1 where the iterator vars must be
9900 lastprivate. */
9901 if (orig_for_stmt != for_stmt)
9902 var = decl;
9903 else if (!is_gimple_reg (decl)
9904 || (ort == ORT_SIMD
9905 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9907 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9908 /* Make sure omp_add_variable is not called on it prematurely.
9909 We call it ourselves a few lines later. */
9910 gimplify_omp_ctxp = NULL;
9911 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9912 gimplify_omp_ctxp = ctx;
9913 TREE_OPERAND (t, 0) = var;
9915 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9917 if (ort == ORT_SIMD
9918 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9920 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9921 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9922 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9923 OMP_CLAUSE_DECL (c2) = var;
9924 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9925 OMP_FOR_CLAUSES (for_stmt) = c2;
9926 omp_add_variable (gimplify_omp_ctxp, var,
9927 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9928 if (c == NULL_TREE)
9930 c = c2;
9931 c2 = NULL_TREE;
9934 else
9935 omp_add_variable (gimplify_omp_ctxp, var,
9936 GOVD_PRIVATE | GOVD_SEEN);
9938 else
9939 var = decl;
9941 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9942 is_gimple_val, fb_rvalue, false);
9943 ret = MIN (ret, tret);
9944 if (ret == GS_ERROR)
9945 return ret;
9947 /* Handle OMP_FOR_COND. */
9948 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9949 gcc_assert (COMPARISON_CLASS_P (t));
9950 gcc_assert (TREE_OPERAND (t, 0) == decl);
9952 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9953 is_gimple_val, fb_rvalue, false);
9954 ret = MIN (ret, tret);
9956 /* Handle OMP_FOR_INCR. */
9957 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9958 switch (TREE_CODE (t))
9960 case PREINCREMENT_EXPR:
9961 case POSTINCREMENT_EXPR:
9963 tree decl = TREE_OPERAND (t, 0);
9964 /* c_omp_for_incr_canonicalize_ptr() should have been
9965 called to massage things appropriately. */
9966 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9968 if (orig_for_stmt != for_stmt)
9969 break;
9970 t = build_int_cst (TREE_TYPE (decl), 1);
9971 if (c)
9972 OMP_CLAUSE_LINEAR_STEP (c) = t;
9973 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9974 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9975 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9976 break;
9979 case PREDECREMENT_EXPR:
9980 case POSTDECREMENT_EXPR:
9981 /* c_omp_for_incr_canonicalize_ptr() should have been
9982 called to massage things appropriately. */
9983 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9984 if (orig_for_stmt != for_stmt)
9985 break;
9986 t = build_int_cst (TREE_TYPE (decl), -1);
9987 if (c)
9988 OMP_CLAUSE_LINEAR_STEP (c) = t;
9989 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9990 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9991 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9992 break;
9994 case MODIFY_EXPR:
9995 gcc_assert (TREE_OPERAND (t, 0) == decl);
9996 TREE_OPERAND (t, 0) = var;
9998 t = TREE_OPERAND (t, 1);
9999 switch (TREE_CODE (t))
10001 case PLUS_EXPR:
10002 if (TREE_OPERAND (t, 1) == decl)
10004 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10005 TREE_OPERAND (t, 0) = var;
10006 break;
10009 /* Fallthru. */
10010 case MINUS_EXPR:
10011 case POINTER_PLUS_EXPR:
10012 gcc_assert (TREE_OPERAND (t, 0) == decl);
10013 TREE_OPERAND (t, 0) = var;
10014 break;
10015 default:
10016 gcc_unreachable ();
10019 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10020 is_gimple_val, fb_rvalue, false);
10021 ret = MIN (ret, tret);
10022 if (c)
10024 tree step = TREE_OPERAND (t, 1);
10025 tree stept = TREE_TYPE (decl);
10026 if (POINTER_TYPE_P (stept))
10027 stept = sizetype;
10028 step = fold_convert (stept, step);
10029 if (TREE_CODE (t) == MINUS_EXPR)
10030 step = fold_build1 (NEGATE_EXPR, stept, step);
10031 OMP_CLAUSE_LINEAR_STEP (c) = step;
10032 if (step != TREE_OPERAND (t, 1))
10034 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10035 &for_pre_body, NULL,
10036 is_gimple_val, fb_rvalue, false);
10037 ret = MIN (ret, tret);
10040 break;
10042 default:
10043 gcc_unreachable ();
10046 if (c2)
10048 gcc_assert (c);
10049 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10052 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10054 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10055 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10056 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10057 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10058 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10059 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10060 && OMP_CLAUSE_DECL (c) == decl)
10062 if (is_doacross && (collapse == 1 || i >= collapse))
10063 t = var;
10064 else
10066 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10067 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10068 gcc_assert (TREE_OPERAND (t, 0) == var);
10069 t = TREE_OPERAND (t, 1);
10070 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10071 || TREE_CODE (t) == MINUS_EXPR
10072 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10073 gcc_assert (TREE_OPERAND (t, 0) == var);
10074 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10075 is_doacross ? var : decl,
10076 TREE_OPERAND (t, 1));
10078 gimple_seq *seq;
10079 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10080 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10081 else
10082 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10083 gimplify_assign (decl, t, seq);
10088 BITMAP_FREE (has_decl_expr);
10090 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10092 push_gimplify_context ();
10093 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10095 OMP_FOR_BODY (orig_for_stmt)
10096 = build3 (BIND_EXPR, void_type_node, NULL,
10097 OMP_FOR_BODY (orig_for_stmt), NULL);
10098 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10102 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10103 &for_body);
10105 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10107 if (gimple_code (g) == GIMPLE_BIND)
10108 pop_gimplify_context (g);
10109 else
10110 pop_gimplify_context (NULL);
10113 if (orig_for_stmt != for_stmt)
10114 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10116 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10117 decl = TREE_OPERAND (t, 0);
10118 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10119 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10120 gimplify_omp_ctxp = ctx->outer_context;
10121 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10122 gimplify_omp_ctxp = ctx;
10123 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10124 TREE_OPERAND (t, 0) = var;
10125 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10126 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10127 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10130 gimplify_adjust_omp_clauses (pre_p, for_body,
10131 &OMP_FOR_CLAUSES (orig_for_stmt),
10132 TREE_CODE (orig_for_stmt));
10134 int kind;
10135 switch (TREE_CODE (orig_for_stmt))
10137 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10138 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10139 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10140 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10141 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10142 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10143 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10144 default:
10145 gcc_unreachable ();
10147 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10148 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10149 for_pre_body);
10150 if (orig_for_stmt != for_stmt)
10151 gimple_omp_for_set_combined_p (gfor, true);
10152 if (gimplify_omp_ctxp
10153 && (gimplify_omp_ctxp->combined_loop
10154 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10155 && gimplify_omp_ctxp->outer_context
10156 && gimplify_omp_ctxp->outer_context->combined_loop)))
10158 gimple_omp_for_set_combined_into_p (gfor, true);
10159 if (gimplify_omp_ctxp->combined_loop)
10160 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10161 else
10162 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10165 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10167 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10168 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10169 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10170 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10171 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10172 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10173 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10174 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10177 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10178 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10179 The outer taskloop stands for computing the number of iterations,
10180 counts for collapsed loops and holding taskloop specific clauses.
10181 The task construct stands for the effect of data sharing on the
10182 explicit task it creates and the inner taskloop stands for expansion
10183 of the static loop inside of the explicit task construct. */
10184 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10186 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10187 tree task_clauses = NULL_TREE;
10188 tree c = *gfor_clauses_ptr;
10189 tree *gtask_clauses_ptr = &task_clauses;
10190 tree outer_for_clauses = NULL_TREE;
10191 tree *gforo_clauses_ptr = &outer_for_clauses;
10192 for (; c; c = OMP_CLAUSE_CHAIN (c))
10193 switch (OMP_CLAUSE_CODE (c))
10195 /* These clauses are allowed on task, move them there. */
10196 case OMP_CLAUSE_SHARED:
10197 case OMP_CLAUSE_FIRSTPRIVATE:
10198 case OMP_CLAUSE_DEFAULT:
10199 case OMP_CLAUSE_IF:
10200 case OMP_CLAUSE_UNTIED:
10201 case OMP_CLAUSE_FINAL:
10202 case OMP_CLAUSE_MERGEABLE:
10203 case OMP_CLAUSE_PRIORITY:
10204 *gtask_clauses_ptr = c;
10205 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10206 break;
10207 case OMP_CLAUSE_PRIVATE:
10208 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10210 /* We want private on outer for and firstprivate
10211 on task. */
10212 *gtask_clauses_ptr
10213 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10214 OMP_CLAUSE_FIRSTPRIVATE);
10215 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10216 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10217 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10218 *gforo_clauses_ptr = c;
10219 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10221 else
10223 *gtask_clauses_ptr = c;
10224 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10226 break;
10227 /* These clauses go into outer taskloop clauses. */
10228 case OMP_CLAUSE_GRAINSIZE:
10229 case OMP_CLAUSE_NUM_TASKS:
10230 case OMP_CLAUSE_NOGROUP:
10231 *gforo_clauses_ptr = c;
10232 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10233 break;
10234 /* Taskloop clause we duplicate on both taskloops. */
10235 case OMP_CLAUSE_COLLAPSE:
10236 *gfor_clauses_ptr = c;
10237 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10238 *gforo_clauses_ptr = copy_node (c);
10239 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10240 break;
10241 /* For lastprivate, keep the clause on inner taskloop, and add
10242 a shared clause on task. If the same decl is also firstprivate,
10243 add also firstprivate clause on the inner taskloop. */
10244 case OMP_CLAUSE_LASTPRIVATE:
10245 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10247 /* For taskloop C++ lastprivate IVs, we want:
10248 1) private on outer taskloop
10249 2) firstprivate and shared on task
10250 3) lastprivate on inner taskloop */
10251 *gtask_clauses_ptr
10252 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10253 OMP_CLAUSE_FIRSTPRIVATE);
10254 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10255 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10256 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10257 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10258 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10259 OMP_CLAUSE_PRIVATE);
10260 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10261 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10262 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10263 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10265 *gfor_clauses_ptr = c;
10266 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10267 *gtask_clauses_ptr
10268 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10269 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10270 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10271 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10272 gtask_clauses_ptr
10273 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10274 break;
10275 default:
10276 gcc_unreachable ();
10278 *gfor_clauses_ptr = NULL_TREE;
10279 *gtask_clauses_ptr = NULL_TREE;
10280 *gforo_clauses_ptr = NULL_TREE;
10281 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10282 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10283 NULL_TREE, NULL_TREE, NULL_TREE);
10284 gimple_omp_task_set_taskloop_p (g, true);
10285 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10286 gomp_for *gforo
10287 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10288 gimple_omp_for_collapse (gfor),
10289 gimple_omp_for_pre_body (gfor));
10290 gimple_omp_for_set_pre_body (gfor, NULL);
10291 gimple_omp_for_set_combined_p (gforo, true);
10292 gimple_omp_for_set_combined_into_p (gfor, true);
10293 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10295 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10296 tree v = create_tmp_var (type);
10297 gimple_omp_for_set_index (gforo, i, v);
10298 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10299 gimple_omp_for_set_initial (gforo, i, t);
10300 gimple_omp_for_set_cond (gforo, i,
10301 gimple_omp_for_cond (gfor, i));
10302 t = unshare_expr (gimple_omp_for_final (gfor, i));
10303 gimple_omp_for_set_final (gforo, i, t);
10304 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10305 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10306 TREE_OPERAND (t, 0) = v;
10307 gimple_omp_for_set_incr (gforo, i, t);
10308 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10309 OMP_CLAUSE_DECL (t) = v;
10310 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10311 gimple_omp_for_set_clauses (gforo, t);
10313 gimplify_seq_add_stmt (pre_p, gforo);
10315 else
10316 gimplify_seq_add_stmt (pre_p, gfor);
10317 if (ret != GS_ALL_DONE)
10318 return GS_ERROR;
10319 *expr_p = NULL_TREE;
10320 return GS_ALL_DONE;
10323 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10324 of OMP_TARGET's body. */
10326 static tree
10327 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10329 *walk_subtrees = 0;
10330 switch (TREE_CODE (*tp))
10332 case OMP_TEAMS:
10333 return *tp;
10334 case BIND_EXPR:
10335 case STATEMENT_LIST:
10336 *walk_subtrees = 1;
10337 break;
10338 default:
10339 break;
10341 return NULL_TREE;
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.
   walk_tree callback: returns the offending subtree if *TP cannot be
   evaluated on the host ahead of the construct, NULL_TREE if it can.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      /* Types themselves never block host evaluation; don't walk into
	 them.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject erroneous or non-integral decls, and anything whose
	 evaluation could have side effects or be volatile.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* "omp declare target" globals have a device copy, so reading the
	 host copy instead is not safe.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A function-local VAR_DECL not yet seen in any BIND_EXPR —
	 presumably a temporary created during gimplification; reject
	 it.  NOTE(review): inferred from the flag tested, confirm.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      /* Otherwise, decide based on the decl's data-sharing on the
	 target construct.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not mentioned on the construct: acceptable only if scalars
	     are implicitly firstprivatized on this target region.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	/* map(always, to:) / map(always, tofrom:) copies the host value
	   to the device, so the host value matches.  */
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only an uninitialized TARGET_EXPR over a plain VAR_DECL slot is
	 acceptable; recurse on the slot itself.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
10443 /* Try to determine if the num_teams and/or thread_limit expressions
10444 can have their values determined already before entering the
10445 target construct.
10446 INTEGER_CSTs trivially are,
10447 integral decls that are firstprivate (explicitly or implicitly)
10448 or explicitly map(always, to:) or map(always, tofrom:) on the target
10449 region too, and expressions involving simple arithmetics on those
10450 too, function calls are not ok, dereferencing something neither etc.
10451 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10452 EXPR based on what we find:
10453 0 stands for clause not specified at all, use implementation default
10454 -1 stands for value that can't be determined easily before entering
10455 the target construct.
10456 If teams construct is not present at all, use 1 for num_teams
10457 and 0 for thread_limit (only one team is involved, and the thread
10458 limit is implementation defined. */
10460 static void
10461 optimize_target_teams (tree target, gimple_seq *pre_p)
10463 tree body = OMP_BODY (target);
10464 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10465 tree num_teams = integer_zero_node;
10466 tree thread_limit = integer_zero_node;
10467 location_t num_teams_loc = EXPR_LOCATION (target);
10468 location_t thread_limit_loc = EXPR_LOCATION (target);
10469 tree c, *p, expr;
10470 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10472 if (teams == NULL_TREE)
10473 num_teams = integer_one_node;
10474 else
10475 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10477 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10479 p = &num_teams;
10480 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10482 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10484 p = &thread_limit;
10485 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10487 else
10488 continue;
10489 expr = OMP_CLAUSE_OPERAND (c, 0);
10490 if (TREE_CODE (expr) == INTEGER_CST)
10492 *p = expr;
10493 continue;
10495 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10497 *p = integer_minus_one_node;
10498 continue;
10500 *p = expr;
10501 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10502 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10503 == GS_ERROR)
10505 gimplify_omp_ctxp = target_ctx;
10506 *p = integer_minus_one_node;
10507 continue;
10509 gimplify_omp_ctxp = target_ctx;
10510 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10511 OMP_CLAUSE_OPERAND (c, 0) = *p;
10513 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10514 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10515 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10516 OMP_TARGET_CLAUSES (target) = c;
10517 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10518 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10519 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10520 OMP_TARGET_CLAUSES (target) = c;
10523 /* Gimplify the gross structure of several OMP constructs. */
/* Handles OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA, OMP_TEAMS
   and the OpenACC DATA/KERNELS/PARALLEL/HOST_DATA constructs: scans the
   clauses, gimplifies the region body, then appends the corresponding
   GIMPLE_OMP_* statement to PRE_P and clears *EXPR_P.  */
10525 static void
10526 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10528 tree expr = *expr_p;
10529 gimple *stmt;
10530 gimple_seq body = NULL;
10531 enum omp_region_type ort;
/* Map the construct's tree code to the omp_region_type that controls
   how clauses are scanned (workshare vs. target vs. teams, combined
   variants, and the ORT_ACC_* OpenACC flavors).  */
10533 switch (TREE_CODE (expr))
10535 case OMP_SECTIONS:
10536 case OMP_SINGLE:
10537 ort = ORT_WORKSHARE;
10538 break;
10539 case OMP_TARGET:
10540 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10541 break;
10542 case OACC_KERNELS:
10543 ort = ORT_ACC_KERNELS;
10544 break;
10545 case OACC_PARALLEL:
10546 ort = ORT_ACC_PARALLEL;
10547 break;
10548 case OACC_DATA:
10549 ort = ORT_ACC_DATA;
10550 break;
10551 case OMP_TARGET_DATA:
10552 ort = ORT_TARGET_DATA;
10553 break;
10554 case OMP_TEAMS:
10555 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10556 break;
10557 case OACC_HOST_DATA:
10558 ort = ORT_ACC_HOST_DATA;
10559 break;
10560 default:
10561 gcc_unreachable ();
10563 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10564 TREE_CODE (expr));
/* For target regions, fold computable num_teams/thread_limit values
   onto the target's own clause list.  */
10565 if (TREE_CODE (expr) == OMP_TARGET)
10566 optimize_target_teams (expr, pre_p);
/* Target-like regions get their own gimplify context so temporaries
   created for the body stay inside the region's bind.  */
10567 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10569 push_gimplify_context ();
10570 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10571 if (gimple_code (g) == GIMPLE_BIND)
10572 pop_gimplify_context (g);
10573 else
10574 pop_gimplify_context (NULL);
/* Data regions wrap the body in a GIMPLE_TRY_FINALLY whose cleanup
   calls the runtime's "end data" builtin, so the mapping is undone
   even on abnormal exit from the region.  */
10575 if ((ort & ORT_TARGET_DATA) != 0)
10577 enum built_in_function end_ix;
10578 switch (TREE_CODE (expr))
10580 case OACC_DATA:
10581 case OACC_HOST_DATA:
10582 end_ix = BUILT_IN_GOACC_DATA_END;
10583 break;
10584 case OMP_TARGET_DATA:
10585 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10586 break;
10587 default:
10588 gcc_unreachable ();
10590 tree fn = builtin_decl_explicit (end_ix);
10591 g = gimple_build_call (fn, 0);
10592 gimple_seq cleanup = NULL;
10593 gimple_seq_add_stmt (&cleanup, g);
10594 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10595 body = NULL;
10596 gimple_seq_add_stmt (&body, g);
10599 else
10600 gimplify_and_add (OMP_BODY (expr), &body);
10601 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10602 TREE_CODE (expr));
/* Build the GIMPLE statement matching the source-level construct.  */
10604 switch (TREE_CODE (expr))
10606 case OACC_DATA:
10607 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10608 OMP_CLAUSES (expr));
10609 break;
10610 case OACC_KERNELS:
10611 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10612 OMP_CLAUSES (expr));
10613 break;
10614 case OACC_HOST_DATA:
10615 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10616 OMP_CLAUSES (expr));
10617 break;
10618 case OACC_PARALLEL:
10619 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10620 OMP_CLAUSES (expr));
10621 break;
10622 case OMP_SECTIONS:
10623 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10624 break;
10625 case OMP_SINGLE:
10626 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10627 break;
10628 case OMP_TARGET:
10629 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10630 OMP_CLAUSES (expr));
10631 break;
10632 case OMP_TARGET_DATA:
10633 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10634 OMP_CLAUSES (expr));
10635 break;
10636 case OMP_TEAMS:
10637 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10638 break;
10639 default:
10640 gcc_unreachable ();
10643 gimplify_seq_add_stmt (pre_p, stmt);
10644 *expr_p = NULL_TREE;
10647 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10648 target update constructs. */
10650 static void
10651 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10653 tree expr = *expr_p;
10654 int kind;
10655 gomp_target *stmt;
10656 enum omp_region_type ort = ORT_WORKSHARE;
10658 switch (TREE_CODE (expr))
10660 case OACC_ENTER_DATA:
10661 case OACC_EXIT_DATA:
10662 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10663 ort = ORT_ACC;
10664 break;
10665 case OACC_UPDATE:
10666 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10667 ort = ORT_ACC;
10668 break;
10669 case OMP_TARGET_UPDATE:
10670 kind = GF_OMP_TARGET_KIND_UPDATE;
10671 break;
10672 case OMP_TARGET_ENTER_DATA:
10673 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10674 break;
10675 case OMP_TARGET_EXIT_DATA:
10676 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10677 break;
10678 default:
10679 gcc_unreachable ();
10681 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10682 ort, TREE_CODE (expr));
10683 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10684 TREE_CODE (expr));
10685 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10687 gimplify_seq_add_stmt (pre_p, stmt);
10688 *expr_p = NULL_TREE;
10691 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10692 stabilized the lhs of the atomic operation as *ADDR. Return true if
10693 EXPR is this stabilized form. */
10695 static bool
10696 goa_lhs_expr_p (tree expr, tree addr)
10698 /* Also include casts to other type variants. The C front end is fond
10699 of adding these for e.g. volatile variables. This is like
10700 STRIP_TYPE_NOPS but includes the main variant lookup. */
10701 STRIP_USELESS_TYPE_CONVERSION (expr);
10703 if (TREE_CODE (expr) == INDIRECT_REF)
10705 expr = TREE_OPERAND (expr, 0);
10706 while (expr != addr
10707 && (CONVERT_EXPR_P (expr)
10708 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10709 && TREE_CODE (expr) == TREE_CODE (addr)
10710 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10712 expr = TREE_OPERAND (expr, 0);
10713 addr = TREE_OPERAND (addr, 0);
10715 if (expr == addr)
10716 return true;
10717 return (TREE_CODE (addr) == ADDR_EXPR
10718 && TREE_CODE (expr) == ADDR_EXPR
10719 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10721 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10722 return true;
10723 return false;
10726 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10727 expression does not involve the lhs, evaluate it into a temporary.
10728 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10729 or -1 if an error was encountered. */
10731 static int
10732 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10733 tree lhs_var)
10735 tree expr = *expr_p;
10736 int saw_lhs;
/* The entire expression is the atomic lhs: substitute the load
   temporary in place.  */
10738 if (goa_lhs_expr_p (expr, lhs_addr))
10740 *expr_p = lhs_var;
10741 return 1;
/* Already a gimple value and not the lhs; nothing to stabilize.  */
10743 if (is_gimple_val (expr))
10744 return 0;
/* Otherwise recurse into whichever operands might still contain the
   lhs, OR-ing the results together.  */
10746 saw_lhs = 0;
10747 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10749 case tcc_binary:
10750 case tcc_comparison:
10751 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10752 lhs_var);
10753 /* FALLTHRU */
10754 case tcc_unary:
10755 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10756 lhs_var);
10757 break;
10758 case tcc_expression:
10759 switch (TREE_CODE (expr))
10761 case TRUTH_ANDIF_EXPR:
10762 case TRUTH_ORIF_EXPR:
10763 case TRUTH_AND_EXPR:
10764 case TRUTH_OR_EXPR:
10765 case TRUTH_XOR_EXPR:
10766 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10767 lhs_addr, lhs_var);
10768 /* FALLTHRU */
10769 case TRUTH_NOT_EXPR:
10770 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10771 lhs_addr, lhs_var);
10772 break;
10773 case COMPOUND_EXPR:
10774 /* Break out any preevaluations from cp_build_modify_expr. */
/* Emit each COMPOUND_EXPR prefix statement into PRE_P, then
   restart stabilization on the final value expression.  */
10775 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10776 expr = TREE_OPERAND (expr, 1))
10777 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10778 *expr_p = expr;
10779 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10780 default:
10781 break;
10783 break;
10784 default:
10785 break;
/* EXPR does not mention the lhs anywhere: force it into a temporary
   now, so it is evaluated once, outside the atomic region.  */
10788 if (saw_lhs == 0)
10790 enum gimplify_status gs;
10791 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10792 if (gs != GS_ALL_DONE)
10793 saw_lhs = -1;
10796 return saw_lhs;
10799 /* Gimplify an OMP_ATOMIC statement. */
/* Lowers OMP_ATOMIC / OMP_ATOMIC_READ / OMP_ATOMIC_CAPTURE_{OLD,NEW} into
   a GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair appended to
   PRE_P.  Returns GS_ALL_DONE on success, GS_ERROR if any operand fails
   to gimplify.  */
10801 static enum gimplify_status
10802 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10804 tree addr = TREE_OPERAND (*expr_p, 0);
10805 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10806 ? NULL : TREE_OPERAND (*expr_p, 1);
10807 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10808 tree tmp_load;
10809 gomp_atomic_load *loadstmt;
10810 gomp_atomic_store *storestmt;
/* TMP_LOAD receives the atomically-loaded value; goa_stabilize_expr
   replaces every appearance of *ADDR inside RHS with it and evaluates
   lhs-free subexpressions before the atomic region (< 0 on error).  */
10812 tmp_load = create_tmp_reg (type);
10813 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10814 return GS_ERROR;
10816 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10817 != GS_ALL_DONE)
10818 return GS_ERROR;
10820 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10821 gimplify_seq_add_stmt (pre_p, loadstmt);
10822 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10823 != GS_ALL_DONE)
10824 return GS_ERROR;
/* An atomic read stores back the loaded value unchanged.  */
10826 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10827 rhs = tmp_load;
10828 storestmt = gimple_build_omp_atomic_store (rhs);
10829 gimplify_seq_add_stmt (pre_p, storestmt);
/* Propagate the seq_cst clause onto both halves of the pair.  */
10830 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10832 gimple_omp_atomic_set_seq_cst (loadstmt);
10833 gimple_omp_atomic_set_seq_cst (storestmt);
/* For reads and captures, *EXPR_P becomes the captured value (the old
   value from the load, or the new value from the store) and the
   corresponding statement is flagged as needing its value; plain
   atomic updates produce no value.  */
10835 switch (TREE_CODE (*expr_p))
10837 case OMP_ATOMIC_READ:
10838 case OMP_ATOMIC_CAPTURE_OLD:
10839 *expr_p = tmp_load;
10840 gimple_omp_atomic_set_need_value (loadstmt);
10841 break;
10842 case OMP_ATOMIC_CAPTURE_NEW:
10843 *expr_p = rhs;
10844 gimple_omp_atomic_set_need_value (storestmt);
10845 break;
10846 default:
10847 *expr_p = NULL;
10848 break;
10851 return GS_ALL_DONE;
10854 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10855 body, and adding some EH bits. */
/* Returns GS_OK when the transaction produced a value (left in *EXPR_P
   for further gimplification), otherwise GS_ALL_DONE with *EXPR_P
   cleared.  */
10857 static enum gimplify_status
10858 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10860 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10861 gimple *body_stmt;
10862 gtransaction *trans_stmt;
10863 gimple_seq body = NULL;
10864 int subcode = 0;
10866 /* Wrap the transaction body in a BIND_EXPR so we have a context
10867 where to put decls for OMP. */
10868 if (TREE_CODE (tbody) != BIND_EXPR)
10870 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10871 TREE_SIDE_EFFECTS (bind) = 1;
10872 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10873 TRANSACTION_EXPR_BODY (expr) = bind;
/* Gimplify the body in its own context; voidify_wrapper_expr returns a
   temporary if the transaction expression yields a value, in which case
   assignments to that temporary are inserted into the body.  */
10876 push_gimplify_context ();
10877 temp = voidify_wrapper_expr (*expr_p, NULL);
10879 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10880 pop_gimplify_context (body_stmt);
/* Build the GIMPLE_TRANSACTION and encode the outer/relaxed variant
   (mutually exclusive) in its subcode.  */
10882 trans_stmt = gimple_build_transaction (body);
10883 if (TRANSACTION_EXPR_OUTER (expr))
10884 subcode = GTMA_IS_OUTER;
10885 else if (TRANSACTION_EXPR_RELAXED (expr))
10886 subcode = GTMA_IS_RELAXED;
10887 gimple_transaction_set_subcode (trans_stmt, subcode);
10889 gimplify_seq_add_stmt (pre_p, trans_stmt);
/* If a value temporary was created, hand it back for the caller to
   keep gimplifying.  */
10891 if (temp)
10893 *expr_p = temp;
10894 return GS_OK;
10897 *expr_p = NULL_TREE;
10898 return GS_ALL_DONE;
10901 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10902 is the OMP_BODY of the original EXPR (which has already been
10903 gimplified so it's not present in the EXPR).
10905 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
/* Validates depend(sink:)/depend(source) clauses against the iteration
   variables recorded in gimplify_omp_ctxp->loop_iter_var by the
   enclosing ordered loop; on any diagnosed error a GIMPLE_NOP is
   returned instead of the ordered statement.  */
10907 static gimple *
10908 gimplify_omp_ordered (tree expr, gimple_seq body)
10910 tree c, decls;
10911 int failures = 0;
10912 unsigned int i;
10913 tree source_c = NULL_TREE;
10914 tree sink_c = NULL_TREE;
10916 if (gimplify_omp_ctxp)
10918 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
/* A depend clause outside of any ordered(n) loop nest (no recorded
   iteration variables) is invalid.  */
10919 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10920 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10921 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10922 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)) 
10924 error_at (OMP_CLAUSE_LOCATION (c),
10925 "%<ordered%> construct with %<depend%> clause must be "
10926 "closely nested inside a loop with %<ordered%> clause "
10927 "with a parameter")
10928 failures++;
10930 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10931 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
/* Check each depend(sink:) variable against the corresponding
   loop iteration variable (loop_iter_var stores pairs: original
   decl at index 2*i, its replacement at 2*i+1) and substitute the
   replacement on success.  */
10933 bool fail = false;
10934 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10935 decls && TREE_CODE (decls) == TREE_LIST;
10936 decls = TREE_CHAIN (decls), ++i)
10937 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10938 continue;
10939 else if (TREE_VALUE (decls)
10940 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10942 error_at (OMP_CLAUSE_LOCATION (c),
10943 "variable %qE is not an iteration "
10944 "of outermost loop %d, expected %qE",
10945 TREE_VALUE (decls), i + 1,
10946 gimplify_omp_ctxp->loop_iter_var[2 * i]);
10947 fail = true;
10948 failures++;
10950 else
10951 TREE_VALUE (decls)
10952 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
10953 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10955 error_at (OMP_CLAUSE_LOCATION (c),
10956 "number of variables in %<depend(sink)%> "
10957 "clause does not match number of "
10958 "iteration variables");
10959 failures++;
10961 sink_c = c;
10963 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10964 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
/* At most one depend(source) clause is permitted.  */
10966 if (source_c)
10968 error_at (OMP_CLAUSE_LOCATION (c),
10969 "more than one %<depend(source)%> clause on an "
10970 "%<ordered%> construct");
10971 failures++;
10973 else
10974 source_c = c;
/* depend(source) and depend(sink:) may not be mixed on one construct.  */
10977 if (source_c && sink_c)
10979 error_at (OMP_CLAUSE_LOCATION (source_c),
10980 "%<depend(source)%> clause specified together with "
10981 "%<depend(sink:)%> clauses on the same construct");
10982 failures++;
10985 if (failures)
10986 return gimple_build_nop ();
10987 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
10990 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10991 expression produces a value to be used as an operand inside a GIMPLE
10992 statement, the value will be stored back in *EXPR_P. This value will
10993 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10994 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10995 emitted in PRE_P and POST_P.
10997 Additionally, this process may overwrite parts of the input
10998 expression during gimplification. Ideally, it should be
10999 possible to do non-destructive gimplification.
11001 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11002 the expression needs to evaluate to a value to be used as
11003 an operand in a GIMPLE statement, this value will be stored in
11004 *EXPR_P on exit. This happens when the caller specifies one
11005 of fb_lvalue or fb_rvalue fallback flags.
11007 PRE_P will contain the sequence of GIMPLE statements corresponding
11008 to the evaluation of EXPR and all the side-effects that must
11009 be executed before the main expression. On exit, the last
11010 statement of PRE_P is the core statement being gimplified. For
11011 instance, when gimplifying 'if (++a)' the last statement in
11012 PRE_P will be 'if (t.1)' where t.1 is the result of
11013 pre-incrementing 'a'.
11015 POST_P will contain the sequence of GIMPLE statements corresponding
11016 to the evaluation of all the side-effects that must be executed
11017 after the main expression. If this is NULL, the post
11018 side-effects are stored at the end of PRE_P.
11020 The reason why the output is split in two is to handle post
11021 side-effects explicitly. In some cases, an expression may have
11022 inner and outer post side-effects which need to be emitted in
11023 an order different from the one given by the recursive
11024 traversal. For instance, for the expression (*p--)++ the post
11025 side-effects of '--' must actually occur *after* the post
11026 side-effects of '++'. However, gimplification will first visit
11027 the inner expression, so if a separate POST sequence was not
11028 used, the resulting sequence would be:
11030 1 t.1 = *p
11031 2 p = p - 1
11032 3 t.2 = t.1 + 1
11033 4 *p = t.2
11035 However, the post-decrement operation in line #2 must not be
11036 evaluated until after the store to *p at line #4, so the
11037 correct sequence should be:
11039 1 t.1 = *p
11040 2 t.2 = t.1 + 1
11041 3 *p = t.2
11042 4 p = p - 1
11044 So, by specifying a separate post queue, it is possible
11045 to emit the post side-effects in the correct order.
11046 If POST_P is NULL, an internal queue will be used. Before
11047 returning to the caller, the sequence POST_P is appended to
11048 the main output sequence PRE_P.
11050 GIMPLE_TEST_F points to a function that takes a tree T and
11051 returns nonzero if T is in the GIMPLE form requested by the
11052 caller. The GIMPLE predicates are in gimple.c.
11054 FALLBACK tells the function what sort of a temporary we want if
11055 gimplification cannot produce an expression that complies with
11056 GIMPLE_TEST_F.
11058 fb_none means that no temporary should be generated
11059 fb_rvalue means that an rvalue is OK to generate
11060 fb_lvalue means that an lvalue is OK to generate
11061 fb_either means that either is OK, but an lvalue is preferable.
11062 fb_mayfail means that gimplification may fail (in which case
11063 GS_ERROR will be returned)
11065 The return value is either GS_ERROR or GS_ALL_DONE, since this
11066 function iterates until EXPR is completely gimplified or an error
11067 occurs. */
11069 enum gimplify_status
11070 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11071 bool (*gimple_test_f) (tree), fallback_t fallback)
11073 tree tmp;
11074 gimple_seq internal_pre = NULL;
11075 gimple_seq internal_post = NULL;
11076 tree save_expr;
11077 bool is_statement;
11078 location_t saved_location;
11079 enum gimplify_status ret;
11080 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11081 tree label;
11083 save_expr = *expr_p;
11084 if (save_expr == NULL_TREE)
11085 return GS_ALL_DONE;
11087 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11088 is_statement = gimple_test_f == is_gimple_stmt;
11089 if (is_statement)
11090 gcc_assert (pre_p);
11092 /* Consistency checks. */
11093 if (gimple_test_f == is_gimple_reg)
11094 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11095 else if (gimple_test_f == is_gimple_val
11096 || gimple_test_f == is_gimple_call_addr
11097 || gimple_test_f == is_gimple_condexpr
11098 || gimple_test_f == is_gimple_mem_rhs
11099 || gimple_test_f == is_gimple_mem_rhs_or_call
11100 || gimple_test_f == is_gimple_reg_rhs
11101 || gimple_test_f == is_gimple_reg_rhs_or_call
11102 || gimple_test_f == is_gimple_asm_val
11103 || gimple_test_f == is_gimple_mem_ref_addr)
11104 gcc_assert (fallback & fb_rvalue);
11105 else if (gimple_test_f == is_gimple_min_lval
11106 || gimple_test_f == is_gimple_lvalue)
11107 gcc_assert (fallback & fb_lvalue);
11108 else if (gimple_test_f == is_gimple_addressable)
11109 gcc_assert (fallback & fb_either);
11110 else if (gimple_test_f == is_gimple_stmt)
11111 gcc_assert (fallback == fb_none);
11112 else
11114 /* We should have recognized the GIMPLE_TEST_F predicate to
11115 know what kind of fallback to use in case a temporary is
11116 needed to hold the value or address of *EXPR_P. */
11117 gcc_unreachable ();
11120 /* We used to check the predicate here and return immediately if it
11121 succeeds. This is wrong; the design is for gimplification to be
11122 idempotent, and for the predicates to only test for valid forms, not
11123 whether they are fully simplified. */
11124 if (pre_p == NULL)
11125 pre_p = &internal_pre;
11127 if (post_p == NULL)
11128 post_p = &internal_post;
11130 /* Remember the last statements added to PRE_P and POST_P. Every
11131 new statement added by the gimplification helpers needs to be
11132 annotated with location information. To centralize the
11133 responsibility, we remember the last statement that had been
11134 added to both queues before gimplifying *EXPR_P. If
11135 gimplification produces new statements in PRE_P and POST_P, those
11136 statements will be annotated with the same location information
11137 as *EXPR_P. */
11138 pre_last_gsi = gsi_last (*pre_p);
11139 post_last_gsi = gsi_last (*post_p);
11141 saved_location = input_location;
11142 if (save_expr != error_mark_node
11143 && EXPR_HAS_LOCATION (*expr_p))
11144 input_location = EXPR_LOCATION (*expr_p);
11146 /* Loop over the specific gimplifiers until the toplevel node
11147 remains the same. */
11150 /* Strip away as many useless type conversions as possible
11151 at the toplevel. */
11152 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11154 /* Remember the expr. */
11155 save_expr = *expr_p;
11157 /* Die, die, die, my darling. */
11158 if (save_expr == error_mark_node
11159 || (TREE_TYPE (save_expr)
11160 && TREE_TYPE (save_expr) == error_mark_node))
11162 ret = GS_ERROR;
11163 break;
11166 /* Do any language-specific gimplification. */
11167 ret = ((enum gimplify_status)
11168 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11169 if (ret == GS_OK)
11171 if (*expr_p == NULL_TREE)
11172 break;
11173 if (*expr_p != save_expr)
11174 continue;
11176 else if (ret != GS_UNHANDLED)
11177 break;
11179 /* Make sure that all the cases set 'ret' appropriately. */
11180 ret = GS_UNHANDLED;
11181 switch (TREE_CODE (*expr_p))
11183 /* First deal with the special cases. */
11185 case POSTINCREMENT_EXPR:
11186 case POSTDECREMENT_EXPR:
11187 case PREINCREMENT_EXPR:
11188 case PREDECREMENT_EXPR:
11189 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11190 fallback != fb_none,
11191 TREE_TYPE (*expr_p));
11192 break;
11194 case VIEW_CONVERT_EXPR:
11195 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11196 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11198 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11199 post_p, is_gimple_val, fb_rvalue);
11200 recalculate_side_effects (*expr_p);
11201 break;
11203 /* Fallthru. */
11205 case ARRAY_REF:
11206 case ARRAY_RANGE_REF:
11207 case REALPART_EXPR:
11208 case IMAGPART_EXPR:
11209 case COMPONENT_REF:
11210 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11211 fallback ? fallback : fb_rvalue);
11212 break;
11214 case COND_EXPR:
11215 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11217 /* C99 code may assign to an array in a structure value of a
11218 conditional expression, and this has undefined behavior
11219 only on execution, so create a temporary if an lvalue is
11220 required. */
11221 if (fallback == fb_lvalue)
11223 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11224 mark_addressable (*expr_p);
11225 ret = GS_OK;
11227 break;
11229 case CALL_EXPR:
11230 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11232 /* C99 code may assign to an array in a structure returned
11233 from a function, and this has undefined behavior only on
11234 execution, so create a temporary if an lvalue is
11235 required. */
11236 if (fallback == fb_lvalue)
11238 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11239 mark_addressable (*expr_p);
11240 ret = GS_OK;
11242 break;
11244 case TREE_LIST:
11245 gcc_unreachable ();
11247 case COMPOUND_EXPR:
11248 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11249 break;
11251 case COMPOUND_LITERAL_EXPR:
11252 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11253 gimple_test_f, fallback);
11254 break;
11256 case MODIFY_EXPR:
11257 case INIT_EXPR:
11258 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11259 fallback != fb_none);
11260 break;
11262 case TRUTH_ANDIF_EXPR:
11263 case TRUTH_ORIF_EXPR:
11265 /* Preserve the original type of the expression and the
11266 source location of the outer expression. */
11267 tree org_type = TREE_TYPE (*expr_p);
11268 *expr_p = gimple_boolify (*expr_p);
11269 *expr_p = build3_loc (input_location, COND_EXPR,
11270 org_type, *expr_p,
11271 fold_convert_loc
11272 (input_location,
11273 org_type, boolean_true_node),
11274 fold_convert_loc
11275 (input_location,
11276 org_type, boolean_false_node));
11277 ret = GS_OK;
11278 break;
11281 case TRUTH_NOT_EXPR:
11283 tree type = TREE_TYPE (*expr_p);
11284 /* The parsers are careful to generate TRUTH_NOT_EXPR
11285 only with operands that are always zero or one.
11286 We do not fold here but handle the only interesting case
11287 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11288 *expr_p = gimple_boolify (*expr_p);
11289 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11290 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11291 TREE_TYPE (*expr_p),
11292 TREE_OPERAND (*expr_p, 0));
11293 else
11294 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11295 TREE_TYPE (*expr_p),
11296 TREE_OPERAND (*expr_p, 0),
11297 build_int_cst (TREE_TYPE (*expr_p), 1));
11298 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11299 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11300 ret = GS_OK;
11301 break;
11304 case ADDR_EXPR:
11305 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11306 break;
11308 case ANNOTATE_EXPR:
11310 tree cond = TREE_OPERAND (*expr_p, 0);
11311 tree kind = TREE_OPERAND (*expr_p, 1);
11312 tree type = TREE_TYPE (cond);
11313 if (!INTEGRAL_TYPE_P (type))
11315 *expr_p = cond;
11316 ret = GS_OK;
11317 break;
11319 tree tmp = create_tmp_var (type);
11320 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11321 gcall *call
11322 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11323 gimple_call_set_lhs (call, tmp);
11324 gimplify_seq_add_stmt (pre_p, call);
11325 *expr_p = tmp;
11326 ret = GS_ALL_DONE;
11327 break;
11330 case VA_ARG_EXPR:
11331 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11332 break;
11334 CASE_CONVERT:
11335 if (IS_EMPTY_STMT (*expr_p))
11337 ret = GS_ALL_DONE;
11338 break;
11341 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11342 || fallback == fb_none)
11344 /* Just strip a conversion to void (or in void context) and
11345 try again. */
11346 *expr_p = TREE_OPERAND (*expr_p, 0);
11347 ret = GS_OK;
11348 break;
11351 ret = gimplify_conversion (expr_p);
11352 if (ret == GS_ERROR)
11353 break;
11354 if (*expr_p != save_expr)
11355 break;
11356 /* FALLTHRU */
11358 case FIX_TRUNC_EXPR:
11359 /* unary_expr: ... | '(' cast ')' val | ... */
11360 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11361 is_gimple_val, fb_rvalue);
11362 recalculate_side_effects (*expr_p);
11363 break;
11365 case INDIRECT_REF:
11367 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11368 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11369 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11371 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11372 if (*expr_p != save_expr)
11374 ret = GS_OK;
11375 break;
11378 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11379 is_gimple_reg, fb_rvalue);
11380 if (ret == GS_ERROR)
11381 break;
11383 recalculate_side_effects (*expr_p);
11384 *expr_p = fold_build2_loc (input_location, MEM_REF,
11385 TREE_TYPE (*expr_p),
11386 TREE_OPERAND (*expr_p, 0),
11387 build_int_cst (saved_ptr_type, 0));
11388 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11389 TREE_THIS_NOTRAP (*expr_p) = notrap;
11390 ret = GS_OK;
11391 break;
11394 /* We arrive here through the various re-gimplifcation paths. */
11395 case MEM_REF:
11396 /* First try re-folding the whole thing. */
11397 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11398 TREE_OPERAND (*expr_p, 0),
11399 TREE_OPERAND (*expr_p, 1));
11400 if (tmp)
11402 REF_REVERSE_STORAGE_ORDER (tmp)
11403 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11404 *expr_p = tmp;
11405 recalculate_side_effects (*expr_p);
11406 ret = GS_OK;
11407 break;
11409 /* Avoid re-gimplifying the address operand if it is already
11410 in suitable form. Re-gimplifying would mark the address
11411 operand addressable. Always gimplify when not in SSA form
11412 as we still may have to gimplify decls with value-exprs. */
11413 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11414 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11416 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11417 is_gimple_mem_ref_addr, fb_rvalue);
11418 if (ret == GS_ERROR)
11419 break;
11421 recalculate_side_effects (*expr_p);
11422 ret = GS_ALL_DONE;
11423 break;
11425 /* Constants need not be gimplified. */
11426 case INTEGER_CST:
11427 case REAL_CST:
11428 case FIXED_CST:
11429 case STRING_CST:
11430 case COMPLEX_CST:
11431 case VECTOR_CST:
11432 /* Drop the overflow flag on constants, we do not want
11433 that in the GIMPLE IL. */
11434 if (TREE_OVERFLOW_P (*expr_p))
11435 *expr_p = drop_tree_overflow (*expr_p);
11436 ret = GS_ALL_DONE;
11437 break;
11439 case CONST_DECL:
11440 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11441 CONST_DECL node. Otherwise the decl is replaceable by its
11442 value. */
11443 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11444 if (fallback & fb_lvalue)
11445 ret = GS_ALL_DONE;
11446 else
11448 *expr_p = DECL_INITIAL (*expr_p);
11449 ret = GS_OK;
11451 break;
11453 case DECL_EXPR:
11454 ret = gimplify_decl_expr (expr_p, pre_p);
11455 break;
11457 case BIND_EXPR:
11458 ret = gimplify_bind_expr (expr_p, pre_p);
11459 break;
11461 case LOOP_EXPR:
11462 ret = gimplify_loop_expr (expr_p, pre_p);
11463 break;
11465 case SWITCH_EXPR:
11466 ret = gimplify_switch_expr (expr_p, pre_p);
11467 break;
11469 case EXIT_EXPR:
11470 ret = gimplify_exit_expr (expr_p);
11471 break;
11473 case GOTO_EXPR:
11474 /* If the target is not LABEL, then it is a computed jump
11475 and the target needs to be gimplified. */
11476 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11478 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11479 NULL, is_gimple_val, fb_rvalue);
11480 if (ret == GS_ERROR)
11481 break;
11483 gimplify_seq_add_stmt (pre_p,
11484 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11485 ret = GS_ALL_DONE;
11486 break;
11488 case PREDICT_EXPR:
11489 gimplify_seq_add_stmt (pre_p,
11490 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11491 PREDICT_EXPR_OUTCOME (*expr_p)));
11492 ret = GS_ALL_DONE;
11493 break;
11495 case LABEL_EXPR:
11496 ret = gimplify_label_expr (expr_p, pre_p);
11497 label = LABEL_EXPR_LABEL (*expr_p);
11498 gcc_assert (decl_function_context (label) == current_function_decl);
11500 /* If the label is used in a goto statement, or address of the label
11501 is taken, we need to unpoison all variables that were seen so far.
11502 Doing so would prevent us from reporting a false positives. */
11503 if (asan_poisoned_variables
11504 && asan_used_labels != NULL
11505 && asan_used_labels->contains (label))
11506 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11507 break;
11509 case CASE_LABEL_EXPR:
11510 ret = gimplify_case_label_expr (expr_p, pre_p);
11512 if (gimplify_ctxp->live_switch_vars)
11513 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11514 pre_p);
11515 break;
11517 case RETURN_EXPR:
11518 ret = gimplify_return_expr (*expr_p, pre_p);
11519 break;
11521 case CONSTRUCTOR:
11522 /* Don't reduce this in place; let gimplify_init_constructor work its
11523 magic. Buf if we're just elaborating this for side effects, just
11524 gimplify any element that has side-effects. */
11525 if (fallback == fb_none)
11527 unsigned HOST_WIDE_INT ix;
11528 tree val;
11529 tree temp = NULL_TREE;
11530 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11531 if (TREE_SIDE_EFFECTS (val))
11532 append_to_statement_list (val, &temp);
11534 *expr_p = temp;
11535 ret = temp ? GS_OK : GS_ALL_DONE;
11537 /* C99 code may assign to an array in a constructed
11538 structure or union, and this has undefined behavior only
11539 on execution, so create a temporary if an lvalue is
11540 required. */
11541 else if (fallback == fb_lvalue)
11543 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11544 mark_addressable (*expr_p);
11545 ret = GS_OK;
11547 else
11548 ret = GS_ALL_DONE;
11549 break;
11551 /* The following are special cases that are not handled by the
11552 original GIMPLE grammar. */
11554 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11555 eliminated. */
11556 case SAVE_EXPR:
11557 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11558 break;
11560 case BIT_FIELD_REF:
11561 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11562 post_p, is_gimple_lvalue, fb_either);
11563 recalculate_side_effects (*expr_p);
11564 break;
11566 case TARGET_MEM_REF:
11568 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11570 if (TMR_BASE (*expr_p))
11571 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11572 post_p, is_gimple_mem_ref_addr, fb_either);
11573 if (TMR_INDEX (*expr_p))
11574 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11575 post_p, is_gimple_val, fb_rvalue);
11576 if (TMR_INDEX2 (*expr_p))
11577 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11578 post_p, is_gimple_val, fb_rvalue);
11579 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11580 ret = MIN (r0, r1);
11582 break;
11584 case NON_LVALUE_EXPR:
11585 /* This should have been stripped above. */
11586 gcc_unreachable ();
11588 case ASM_EXPR:
11589 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11590 break;
11592 case TRY_FINALLY_EXPR:
11593 case TRY_CATCH_EXPR:
11595 gimple_seq eval, cleanup;
11596 gtry *try_;
11598 /* Calls to destructors are generated automatically in FINALLY/CATCH
11599 block. They should have location as UNKNOWN_LOCATION. However,
11600 gimplify_call_expr will reset these call stmts to input_location
11601 if it finds stmt's location is unknown. To prevent resetting for
11602 destructors, we set the input_location to unknown.
11603 Note that this only affects the destructor calls in FINALLY/CATCH
11604 block, and will automatically reset to its original value by the
11605 end of gimplify_expr. */
11606 input_location = UNKNOWN_LOCATION;
11607 eval = cleanup = NULL;
11608 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11609 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11610 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11611 if (gimple_seq_empty_p (cleanup))
11613 gimple_seq_add_seq (pre_p, eval);
11614 ret = GS_ALL_DONE;
11615 break;
11617 try_ = gimple_build_try (eval, cleanup,
11618 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11619 ? GIMPLE_TRY_FINALLY
11620 : GIMPLE_TRY_CATCH);
11621 if (EXPR_HAS_LOCATION (save_expr))
11622 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11623 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11624 gimple_set_location (try_, saved_location);
11625 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11626 gimple_try_set_catch_is_cleanup (try_,
11627 TRY_CATCH_IS_CLEANUP (*expr_p));
11628 gimplify_seq_add_stmt (pre_p, try_);
11629 ret = GS_ALL_DONE;
11630 break;
11633 case CLEANUP_POINT_EXPR:
11634 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11635 break;
11637 case TARGET_EXPR:
11638 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11639 break;
11641 case CATCH_EXPR:
11643 gimple *c;
11644 gimple_seq handler = NULL;
11645 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11646 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11647 gimplify_seq_add_stmt (pre_p, c);
11648 ret = GS_ALL_DONE;
11649 break;
11652 case EH_FILTER_EXPR:
11654 gimple *ehf;
11655 gimple_seq failure = NULL;
11657 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11658 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11659 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11660 gimplify_seq_add_stmt (pre_p, ehf);
11661 ret = GS_ALL_DONE;
11662 break;
11665 case OBJ_TYPE_REF:
11667 enum gimplify_status r0, r1;
11668 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11669 post_p, is_gimple_val, fb_rvalue);
11670 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11671 post_p, is_gimple_val, fb_rvalue);
11672 TREE_SIDE_EFFECTS (*expr_p) = 0;
11673 ret = MIN (r0, r1);
11675 break;
11677 case LABEL_DECL:
11678 /* We get here when taking the address of a label. We mark
11679 the label as "forced"; meaning it can never be removed and
11680 it is a potential target for any computed goto. */
11681 FORCED_LABEL (*expr_p) = 1;
11682 ret = GS_ALL_DONE;
11683 break;
11685 case STATEMENT_LIST:
11686 ret = gimplify_statement_list (expr_p, pre_p);
11687 break;
11689 case WITH_SIZE_EXPR:
11691 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11692 post_p == &internal_post ? NULL : post_p,
11693 gimple_test_f, fallback);
11694 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11695 is_gimple_val, fb_rvalue);
11696 ret = GS_ALL_DONE;
11698 break;
11700 case VAR_DECL:
11701 case PARM_DECL:
11702 ret = gimplify_var_or_parm_decl (expr_p);
11703 break;
11705 case RESULT_DECL:
11706 /* When within an OMP context, notice uses of variables. */
11707 if (gimplify_omp_ctxp)
11708 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11709 ret = GS_ALL_DONE;
11710 break;
11712 case SSA_NAME:
11713 /* Allow callbacks into the gimplifier during optimization. */
11714 ret = GS_ALL_DONE;
11715 break;
11717 case OMP_PARALLEL:
11718 gimplify_omp_parallel (expr_p, pre_p);
11719 ret = GS_ALL_DONE;
11720 break;
11722 case OMP_TASK:
11723 gimplify_omp_task (expr_p, pre_p);
11724 ret = GS_ALL_DONE;
11725 break;
11727 case OMP_FOR:
11728 case OMP_SIMD:
11729 case CILK_SIMD:
11730 case CILK_FOR:
11731 case OMP_DISTRIBUTE:
11732 case OMP_TASKLOOP:
11733 case OACC_LOOP:
11734 ret = gimplify_omp_for (expr_p, pre_p);
11735 break;
11737 case OACC_CACHE:
11738 gimplify_oacc_cache (expr_p, pre_p);
11739 ret = GS_ALL_DONE;
11740 break;
11742 case OACC_DECLARE:
11743 gimplify_oacc_declare (expr_p, pre_p);
11744 ret = GS_ALL_DONE;
11745 break;
11747 case OACC_HOST_DATA:
11748 case OACC_DATA:
11749 case OACC_KERNELS:
11750 case OACC_PARALLEL:
11751 case OMP_SECTIONS:
11752 case OMP_SINGLE:
11753 case OMP_TARGET:
11754 case OMP_TARGET_DATA:
11755 case OMP_TEAMS:
11756 gimplify_omp_workshare (expr_p, pre_p);
11757 ret = GS_ALL_DONE;
11758 break;
11760 case OACC_ENTER_DATA:
11761 case OACC_EXIT_DATA:
11762 case OACC_UPDATE:
11763 case OMP_TARGET_UPDATE:
11764 case OMP_TARGET_ENTER_DATA:
11765 case OMP_TARGET_EXIT_DATA:
11766 gimplify_omp_target_update (expr_p, pre_p);
11767 ret = GS_ALL_DONE;
11768 break;
11770 case OMP_SECTION:
11771 case OMP_MASTER:
11772 case OMP_TASKGROUP:
11773 case OMP_ORDERED:
11774 case OMP_CRITICAL:
11776 gimple_seq body = NULL;
11777 gimple *g;
11779 gimplify_and_add (OMP_BODY (*expr_p), &body);
11780 switch (TREE_CODE (*expr_p))
11782 case OMP_SECTION:
11783 g = gimple_build_omp_section (body);
11784 break;
11785 case OMP_MASTER:
11786 g = gimple_build_omp_master (body);
11787 break;
11788 case OMP_TASKGROUP:
11790 gimple_seq cleanup = NULL;
11791 tree fn
11792 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11793 g = gimple_build_call (fn, 0);
11794 gimple_seq_add_stmt (&cleanup, g);
11795 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11796 body = NULL;
11797 gimple_seq_add_stmt (&body, g);
11798 g = gimple_build_omp_taskgroup (body);
11800 break;
11801 case OMP_ORDERED:
11802 g = gimplify_omp_ordered (*expr_p, body);
11803 break;
11804 case OMP_CRITICAL:
11805 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11806 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11807 gimplify_adjust_omp_clauses (pre_p, body,
11808 &OMP_CRITICAL_CLAUSES (*expr_p),
11809 OMP_CRITICAL);
11810 g = gimple_build_omp_critical (body,
11811 OMP_CRITICAL_NAME (*expr_p),
11812 OMP_CRITICAL_CLAUSES (*expr_p));
11813 break;
11814 default:
11815 gcc_unreachable ();
11817 gimplify_seq_add_stmt (pre_p, g);
11818 ret = GS_ALL_DONE;
11819 break;
11822 case OMP_ATOMIC:
11823 case OMP_ATOMIC_READ:
11824 case OMP_ATOMIC_CAPTURE_OLD:
11825 case OMP_ATOMIC_CAPTURE_NEW:
11826 ret = gimplify_omp_atomic (expr_p, pre_p);
11827 break;
11829 case TRANSACTION_EXPR:
11830 ret = gimplify_transaction (expr_p, pre_p);
11831 break;
11833 case TRUTH_AND_EXPR:
11834 case TRUTH_OR_EXPR:
11835 case TRUTH_XOR_EXPR:
11837 tree orig_type = TREE_TYPE (*expr_p);
11838 tree new_type, xop0, xop1;
11839 *expr_p = gimple_boolify (*expr_p);
11840 new_type = TREE_TYPE (*expr_p);
11841 if (!useless_type_conversion_p (orig_type, new_type))
11843 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11844 ret = GS_OK;
11845 break;
11848 /* Boolified binary truth expressions are semantically equivalent
11849 to bitwise binary expressions. Canonicalize them to the
11850 bitwise variant. */
11851 switch (TREE_CODE (*expr_p))
11853 case TRUTH_AND_EXPR:
11854 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11855 break;
11856 case TRUTH_OR_EXPR:
11857 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11858 break;
11859 case TRUTH_XOR_EXPR:
11860 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11861 break;
11862 default:
11863 break;
11865 /* Now make sure that operands have compatible type to
11866 expression's new_type. */
11867 xop0 = TREE_OPERAND (*expr_p, 0);
11868 xop1 = TREE_OPERAND (*expr_p, 1);
11869 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11870 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11871 new_type,
11872 xop0);
11873 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11874 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11875 new_type,
11876 xop1);
11877 /* Continue classified as tcc_binary. */
11878 goto expr_2;
11881 case VEC_COND_EXPR:
11883 enum gimplify_status r0, r1, r2;
11885 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11886 post_p, is_gimple_condexpr, fb_rvalue);
11887 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11888 post_p, is_gimple_val, fb_rvalue);
11889 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11890 post_p, is_gimple_val, fb_rvalue);
11892 ret = MIN (MIN (r0, r1), r2);
11893 recalculate_side_effects (*expr_p);
11895 break;
11897 case FMA_EXPR:
11898 case VEC_PERM_EXPR:
11899 /* Classified as tcc_expression. */
11900 goto expr_3;
11902 case BIT_INSERT_EXPR:
11903 /* Argument 3 is a constant. */
11904 goto expr_2;
11906 case POINTER_PLUS_EXPR:
11908 enum gimplify_status r0, r1;
11909 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11910 post_p, is_gimple_val, fb_rvalue);
11911 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11912 post_p, is_gimple_val, fb_rvalue);
11913 recalculate_side_effects (*expr_p);
11914 ret = MIN (r0, r1);
11915 break;
11918 case CILK_SYNC_STMT:
11920 if (!fn_contains_cilk_spawn_p (cfun))
11922 error_at (EXPR_LOCATION (*expr_p),
11923 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11924 ret = GS_ERROR;
11926 else
11928 gimplify_cilk_sync (expr_p, pre_p);
11929 ret = GS_ALL_DONE;
11931 break;
11934 default:
11935 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11937 case tcc_comparison:
11938 /* Handle comparison of objects of non scalar mode aggregates
11939 with a call to memcmp. It would be nice to only have to do
11940 this for variable-sized objects, but then we'd have to allow
11941 the same nest of reference nodes we allow for MODIFY_EXPR and
11942 that's too complex.
11944 Compare scalar mode aggregates as scalar mode values. Using
11945 memcmp for them would be very inefficient at best, and is
11946 plain wrong if bitfields are involved. */
11948 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11950 /* Vector comparisons need no boolification. */
11951 if (TREE_CODE (type) == VECTOR_TYPE)
11952 goto expr_2;
11953 else if (!AGGREGATE_TYPE_P (type))
11955 tree org_type = TREE_TYPE (*expr_p);
11956 *expr_p = gimple_boolify (*expr_p);
11957 if (!useless_type_conversion_p (org_type,
11958 TREE_TYPE (*expr_p)))
11960 *expr_p = fold_convert_loc (input_location,
11961 org_type, *expr_p);
11962 ret = GS_OK;
11964 else
11965 goto expr_2;
11967 else if (TYPE_MODE (type) != BLKmode)
11968 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11969 else
11970 ret = gimplify_variable_sized_compare (expr_p);
11972 break;
11975 /* If *EXPR_P does not need to be special-cased, handle it
11976 according to its class. */
11977 case tcc_unary:
11978 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11979 post_p, is_gimple_val, fb_rvalue);
11980 break;
11982 case tcc_binary:
11983 expr_2:
11985 enum gimplify_status r0, r1;
11987 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11988 post_p, is_gimple_val, fb_rvalue);
11989 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11990 post_p, is_gimple_val, fb_rvalue);
11992 ret = MIN (r0, r1);
11993 break;
11996 expr_3:
11998 enum gimplify_status r0, r1, r2;
12000 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12001 post_p, is_gimple_val, fb_rvalue);
12002 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12003 post_p, is_gimple_val, fb_rvalue);
12004 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12005 post_p, is_gimple_val, fb_rvalue);
12007 ret = MIN (MIN (r0, r1), r2);
12008 break;
12011 case tcc_declaration:
12012 case tcc_constant:
12013 ret = GS_ALL_DONE;
12014 goto dont_recalculate;
12016 default:
12017 gcc_unreachable ();
12020 recalculate_side_effects (*expr_p);
12022 dont_recalculate:
12023 break;
12026 gcc_assert (*expr_p || ret != GS_OK);
12028 while (ret == GS_OK);
12030 /* If we encountered an error_mark somewhere nested inside, either
12031 stub out the statement or propagate the error back out. */
12032 if (ret == GS_ERROR)
12034 if (is_statement)
12035 *expr_p = NULL;
12036 goto out;
12039 /* This was only valid as a return value from the langhook, which
12040 we handled. Make sure it doesn't escape from any other context. */
12041 gcc_assert (ret != GS_UNHANDLED);
12043 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12045 /* We aren't looking for a value, and we don't have a valid
12046 statement. If it doesn't have side-effects, throw it away.
12047 We can also get here with code such as "*&&L;", where L is
12048 a LABEL_DECL that is marked as FORCED_LABEL. */
12049 if (TREE_CODE (*expr_p) == LABEL_DECL
12050 || !TREE_SIDE_EFFECTS (*expr_p))
12051 *expr_p = NULL;
12052 else if (!TREE_THIS_VOLATILE (*expr_p))
12054 /* This is probably a _REF that contains something nested that
12055 has side effects. Recurse through the operands to find it. */
12056 enum tree_code code = TREE_CODE (*expr_p);
12058 switch (code)
12060 case COMPONENT_REF:
12061 case REALPART_EXPR:
12062 case IMAGPART_EXPR:
12063 case VIEW_CONVERT_EXPR:
12064 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12065 gimple_test_f, fallback);
12066 break;
12068 case ARRAY_REF:
12069 case ARRAY_RANGE_REF:
12070 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12071 gimple_test_f, fallback);
12072 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12073 gimple_test_f, fallback);
12074 break;
12076 default:
12077 /* Anything else with side-effects must be converted to
12078 a valid statement before we get here. */
12079 gcc_unreachable ();
12082 *expr_p = NULL;
12084 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12085 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12087 /* Historically, the compiler has treated a bare reference
12088 to a non-BLKmode volatile lvalue as forcing a load. */
12089 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12091 /* Normally, we do not want to create a temporary for a
12092 TREE_ADDRESSABLE type because such a type should not be
12093 copied by bitwise-assignment. However, we make an
12094 exception here, as all we are doing here is ensuring that
12095 we read the bytes that make up the type. We use
12096 create_tmp_var_raw because create_tmp_var will abort when
12097 given a TREE_ADDRESSABLE type. */
12098 tree tmp = create_tmp_var_raw (type, "vol");
12099 gimple_add_tmp_var (tmp);
12100 gimplify_assign (tmp, *expr_p, pre_p);
12101 *expr_p = NULL;
12103 else
12104 /* We can't do anything useful with a volatile reference to
12105 an incomplete type, so just throw it away. Likewise for
12106 a BLKmode type, since any implicit inner load should
12107 already have been turned into an explicit one by the
12108 gimplification process. */
12109 *expr_p = NULL;
12112 /* If we are gimplifying at the statement level, we're done. Tack
12113 everything together and return. */
12114 if (fallback == fb_none || is_statement)
12116 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12117 it out for GC to reclaim it. */
12118 *expr_p = NULL_TREE;
12120 if (!gimple_seq_empty_p (internal_pre)
12121 || !gimple_seq_empty_p (internal_post))
12123 gimplify_seq_add_seq (&internal_pre, internal_post);
12124 gimplify_seq_add_seq (pre_p, internal_pre);
12127 /* The result of gimplifying *EXPR_P is going to be the last few
12128 statements in *PRE_P and *POST_P. Add location information
12129 to all the statements that were added by the gimplification
12130 helpers. */
12131 if (!gimple_seq_empty_p (*pre_p))
12132 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12134 if (!gimple_seq_empty_p (*post_p))
12135 annotate_all_with_location_after (*post_p, post_last_gsi,
12136 input_location);
12138 goto out;
12141 #ifdef ENABLE_GIMPLE_CHECKING
12142 if (*expr_p)
12144 enum tree_code code = TREE_CODE (*expr_p);
12145 /* These expressions should already be in gimple IR form. */
12146 gcc_assert (code != MODIFY_EXPR
12147 && code != ASM_EXPR
12148 && code != BIND_EXPR
12149 && code != CATCH_EXPR
12150 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12151 && code != EH_FILTER_EXPR
12152 && code != GOTO_EXPR
12153 && code != LABEL_EXPR
12154 && code != LOOP_EXPR
12155 && code != SWITCH_EXPR
12156 && code != TRY_FINALLY_EXPR
12157 && code != OACC_PARALLEL
12158 && code != OACC_KERNELS
12159 && code != OACC_DATA
12160 && code != OACC_HOST_DATA
12161 && code != OACC_DECLARE
12162 && code != OACC_UPDATE
12163 && code != OACC_ENTER_DATA
12164 && code != OACC_EXIT_DATA
12165 && code != OACC_CACHE
12166 && code != OMP_CRITICAL
12167 && code != OMP_FOR
12168 && code != OACC_LOOP
12169 && code != OMP_MASTER
12170 && code != OMP_TASKGROUP
12171 && code != OMP_ORDERED
12172 && code != OMP_PARALLEL
12173 && code != OMP_SECTIONS
12174 && code != OMP_SECTION
12175 && code != OMP_SINGLE);
12177 #endif
12179 /* Otherwise we're gimplifying a subexpression, so the resulting
12180 value is interesting. If it's a valid operand that matches
12181 GIMPLE_TEST_F, we're done. Unless we are handling some
12182 post-effects internally; if that's the case, we need to copy into
12183 a temporary before adding the post-effects to POST_P. */
12184 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12185 goto out;
12187 /* Otherwise, we need to create a new temporary for the gimplified
12188 expression. */
12190 /* We can't return an lvalue if we have an internal postqueue. The
12191 object the lvalue refers to would (probably) be modified by the
12192 postqueue; we need to copy the value out first, which means an
12193 rvalue. */
12194 if ((fallback & fb_lvalue)
12195 && gimple_seq_empty_p (internal_post)
12196 && is_gimple_addressable (*expr_p))
12198 /* An lvalue will do. Take the address of the expression, store it
12199 in a temporary, and replace the expression with an INDIRECT_REF of
12200 that temporary. */
12201 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12202 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12203 *expr_p = build_simple_mem_ref (tmp);
12205 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12207 /* An rvalue will do. Assign the gimplified expression into a
12208 new temporary TMP and replace the original expression with
12209 TMP. First, make sure that the expression has a type so that
12210 it can be assigned into a temporary. */
12211 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12212 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12214 else
12216 #ifdef ENABLE_GIMPLE_CHECKING
12217 if (!(fallback & fb_mayfail))
12219 fprintf (stderr, "gimplification failed:\n");
12220 print_generic_expr (stderr, *expr_p, 0);
12221 debug_tree (*expr_p);
12222 internal_error ("gimplification failed");
12224 #endif
12225 gcc_assert (fallback & fb_mayfail);
12227 /* If this is an asm statement, and the user asked for the
12228 impossible, don't die. Fail and let gimplify_asm_expr
12229 issue an error. */
12230 ret = GS_ERROR;
12231 goto out;
12234 /* Make sure the temporary matches our predicate. */
12235 gcc_assert ((*gimple_test_f) (*expr_p));
12237 if (!gimple_seq_empty_p (internal_post))
12239 annotate_all_with_location (internal_post, input_location);
12240 gimplify_seq_add_seq (pre_p, internal_post);
12243 out:
12244 input_location = saved_location;
12245 return ret;
12248 /* Like gimplify_expr but make sure the gimplified result is not itself
12249 a SSA name (but a decl if it were). Temporaries required by
12250 evaluating *EXPR_P may be still SSA names. */
12252 static enum gimplify_status
12253 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12254 bool (*gimple_test_f) (tree), fallback_t fallback,
12255 bool allow_ssa)
12257 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12258 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12259 gimple_test_f, fallback);
12260 if (! allow_ssa
12261 && TREE_CODE (*expr_p) == SSA_NAME)
12263 tree name = *expr_p;
12264 if (was_ssa_name_p)
12265 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12266 else
12268 /* Avoid the extra copy if possible. */
12269 *expr_p = create_tmp_reg (TREE_TYPE (name));
12270 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12271 release_ssa_name (name);
12274 return ret;
12277 /* Look through TYPE for variable-sized objects and gimplify each such
12278 size that we find. Add to LIST_P any statements generated. */
12280 void
12281 gimplify_type_sizes (tree type, gimple_seq *list_p)
12283 tree field, t;
12285 if (type == NULL || type == error_mark_node)
12286 return;
12288 /* We first do the main variant, then copy into any other variants. */
12289 type = TYPE_MAIN_VARIANT (type);
12291 /* Avoid infinite recursion. */
12292 if (TYPE_SIZES_GIMPLIFIED (type))
12293 return;
12295 TYPE_SIZES_GIMPLIFIED (type) = 1;
12297 switch (TREE_CODE (type))
12299 case INTEGER_TYPE:
12300 case ENUMERAL_TYPE:
12301 case BOOLEAN_TYPE:
12302 case REAL_TYPE:
12303 case FIXED_POINT_TYPE:
12304 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12305 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12307 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12309 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12310 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12312 break;
12314 case ARRAY_TYPE:
12315 /* These types may not have declarations, so handle them here. */
12316 gimplify_type_sizes (TREE_TYPE (type), list_p);
12317 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12318 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12319 with assigned stack slots, for -O1+ -g they should be tracked
12320 by VTA. */
12321 if (!(TYPE_NAME (type)
12322 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12323 && DECL_IGNORED_P (TYPE_NAME (type)))
12324 && TYPE_DOMAIN (type)
12325 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12327 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12328 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12329 DECL_IGNORED_P (t) = 0;
12330 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12331 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12332 DECL_IGNORED_P (t) = 0;
12334 break;
12336 case RECORD_TYPE:
12337 case UNION_TYPE:
12338 case QUAL_UNION_TYPE:
12339 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12340 if (TREE_CODE (field) == FIELD_DECL)
12342 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12343 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12344 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12345 gimplify_type_sizes (TREE_TYPE (field), list_p);
12347 break;
12349 case POINTER_TYPE:
12350 case REFERENCE_TYPE:
12351 /* We used to recurse on the pointed-to type here, which turned out to
12352 be incorrect because its definition might refer to variables not
12353 yet initialized at this point if a forward declaration is involved.
12355 It was actually useful for anonymous pointed-to types to ensure
12356 that the sizes evaluation dominates every possible later use of the
12357 values. Restricting to such types here would be safe since there
12358 is no possible forward declaration around, but would introduce an
12359 undesirable middle-end semantic to anonymity. We then defer to
12360 front-ends the responsibility of ensuring that the sizes are
12361 evaluated both early and late enough, e.g. by attaching artificial
12362 type declarations to the tree. */
12363 break;
12365 default:
12366 break;
12369 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12370 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12372 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12374 TYPE_SIZE (t) = TYPE_SIZE (type);
12375 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12376 TYPE_SIZES_GIMPLIFIED (t) = 1;
12380 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12381 a size or position, has had all of its SAVE_EXPRs evaluated.
12382 We add any required statements to *STMT_P. */
12384 void
12385 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12387 tree expr = *expr_p;
12389 /* We don't do anything if the value isn't there, is constant, or contains
12390 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12391 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12392 will want to replace it with a new variable, but that will cause problems
12393 if this type is from outside the function. It's OK to have that here. */
12394 if (is_gimple_sizepos (expr))
12395 return;
12397 *expr_p = unshare_expr (expr);
12399 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12400 if the def vanishes. */
12401 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12404 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12405 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12406 is true, also gimplify the parameters. */
12408 gbind *
12409 gimplify_body (tree fndecl, bool do_parms)
12411 location_t saved_location = input_location;
12412 gimple_seq parm_stmts, seq;
12413 gimple *outer_stmt;
12414 gbind *outer_bind;
12415 struct cgraph_node *cgn;
12417 timevar_push (TV_TREE_GIMPLIFY);
12419 init_tree_ssa (cfun);
12421 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12422 gimplification. */
12423 default_rtl_profile ();
12425 gcc_assert (gimplify_ctxp == NULL);
12426 push_gimplify_context (true);
12428 if (flag_openacc || flag_openmp)
12430 gcc_assert (gimplify_omp_ctxp == NULL);
12431 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12432 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12435 /* Unshare most shared trees in the body and in that of any nested functions.
12436 It would seem we don't have to do this for nested functions because
12437 they are supposed to be output and then the outer function gimplified
12438 first, but the g++ front end doesn't always do it that way. */
12439 unshare_body (fndecl);
12440 unvisit_body (fndecl);
12442 cgn = cgraph_node::get (fndecl);
12443 if (cgn && cgn->origin)
12444 nonlocal_vlas = new hash_set<tree>;
12446 /* Make sure input_location isn't set to something weird. */
12447 input_location = DECL_SOURCE_LOCATION (fndecl);
12449 /* Resolve callee-copies. This has to be done before processing
12450 the body so that DECL_VALUE_EXPR gets processed correctly. */
12451 parm_stmts = do_parms ? gimplify_parameters () : NULL;
12453 /* Gimplify the function's body. */
12454 seq = NULL;
12455 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
12456 outer_stmt = gimple_seq_first_stmt (seq);
12457 if (!outer_stmt)
12459 outer_stmt = gimple_build_nop ();
12460 gimplify_seq_add_stmt (&seq, outer_stmt);
12463 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12464 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12465 if (gimple_code (outer_stmt) == GIMPLE_BIND
12466 && gimple_seq_first (seq) == gimple_seq_last (seq))
12467 outer_bind = as_a <gbind *> (outer_stmt);
12468 else
12469 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12471 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12473 /* If we had callee-copies statements, insert them at the beginning
12474 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12475 if (!gimple_seq_empty_p (parm_stmts))
12477 tree parm;
12479 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12480 gimple_bind_set_body (outer_bind, parm_stmts);
12482 for (parm = DECL_ARGUMENTS (current_function_decl);
12483 parm; parm = DECL_CHAIN (parm))
12484 if (DECL_HAS_VALUE_EXPR_P (parm))
12486 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12487 DECL_IGNORED_P (parm) = 0;
12491 if (nonlocal_vlas)
12493 if (nonlocal_vla_vars)
12495 /* tree-nested.c may later on call declare_vars (..., true);
12496 which relies on BLOCK_VARS chain to be the tail of the
12497 gimple_bind_vars chain. Ensure we don't violate that
12498 assumption. */
12499 if (gimple_bind_block (outer_bind)
12500 == DECL_INITIAL (current_function_decl))
12501 declare_vars (nonlocal_vla_vars, outer_bind, true);
12502 else
12503 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12504 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12505 nonlocal_vla_vars);
12506 nonlocal_vla_vars = NULL_TREE;
12508 delete nonlocal_vlas;
12509 nonlocal_vlas = NULL;
12512 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12513 && gimplify_omp_ctxp)
12515 delete_omp_context (gimplify_omp_ctxp);
12516 gimplify_omp_ctxp = NULL;
12519 pop_gimplify_context (outer_bind);
12520 gcc_assert (gimplify_ctxp == NULL);
12522 if (flag_checking && !seen_error ())
12523 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12525 timevar_pop (TV_TREE_GIMPLIFY);
12526 input_location = saved_location;
12528 return outer_bind;
12531 typedef char *char_p; /* For DEF_VEC_P. */
12533 /* Return whether we should exclude FNDECL from instrumentation. */
12535 static bool
12536 flag_instrument_functions_exclude_p (tree fndecl)
12538 vec<char_p> *v;
12540 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12541 if (v && v->length () > 0)
12543 const char *name;
12544 int i;
12545 char *s;
12547 name = lang_hooks.decl_printable_name (fndecl, 0);
12548 FOR_EACH_VEC_ELT (*v, i, s)
12549 if (strstr (name, s) != NULL)
12550 return true;
12553 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12554 if (v && v->length () > 0)
12556 const char *name;
12557 int i;
12558 char *s;
12560 name = DECL_SOURCE_FILE (fndecl);
12561 FOR_EACH_VEC_ELT (*v, i, s)
12562 if (strstr (name, s) != NULL)
12563 return true;
12566 return false;
12569 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12570 node for the function we want to gimplify.
12572 Return the sequence of GIMPLE statements corresponding to the body
12573 of FNDECL. */
12575 void
12576 gimplify_function_tree (tree fndecl)
12578 tree parm, ret;
12579 gimple_seq seq;
12580 gbind *bind;
12582 gcc_assert (!gimple_body (fndecl));
12584 if (DECL_STRUCT_FUNCTION (fndecl))
12585 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12586 else
12587 push_struct_function (fndecl);
12589 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12590 if necessary. */
12591 cfun->curr_properties |= PROP_gimple_lva;
12593 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12595 /* Preliminarily mark non-addressed complex variables as eligible
12596 for promotion to gimple registers. We'll transform their uses
12597 as we find them. */
12598 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12599 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
12600 && !TREE_THIS_VOLATILE (parm)
12601 && !needs_to_live_in_memory (parm))
12602 DECL_GIMPLE_REG_P (parm) = 1;
12605 ret = DECL_RESULT (fndecl);
12606 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
12607 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
12608 && !needs_to_live_in_memory (ret))
12609 DECL_GIMPLE_REG_P (ret) = 1;
12611 if (asan_sanitize_use_after_scope () && !asan_no_sanitize_address_p ())
12612 asan_poisoned_variables = new hash_set<tree> ();
12613 bind = gimplify_body (fndecl, true);
12614 if (asan_poisoned_variables)
12616 delete asan_poisoned_variables;
12617 asan_poisoned_variables = NULL;
12620 /* The tree body of the function is no longer needed, replace it
12621 with the new GIMPLE body. */
12622 seq = NULL;
12623 gimple_seq_add_stmt (&seq, bind);
12624 gimple_set_body (fndecl, seq);
12626 /* If we're instrumenting function entry/exit, then prepend the call to
12627 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12628 catch the exit hook. */
12629 /* ??? Add some way to ignore exceptions for this TFE. */
12630 if (flag_instrument_function_entry_exit
12631 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
12632 /* Do not instrument extern inline functions. */
12633 && !(DECL_DECLARED_INLINE_P (fndecl)
12634 && DECL_EXTERNAL (fndecl)
12635 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
12636 && !flag_instrument_functions_exclude_p (fndecl))
12638 tree x;
12639 gbind *new_bind;
12640 gimple *tf;
12641 gimple_seq cleanup = NULL, body = NULL;
12642 tree tmp_var;
12643 gcall *call;
12645 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12646 call = gimple_build_call (x, 1, integer_zero_node);
12647 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12648 gimple_call_set_lhs (call, tmp_var);
12649 gimplify_seq_add_stmt (&cleanup, call);
12650 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
12651 call = gimple_build_call (x, 2,
12652 build_fold_addr_expr (current_function_decl),
12653 tmp_var);
12654 gimplify_seq_add_stmt (&cleanup, call);
12655 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
12657 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
12658 call = gimple_build_call (x, 1, integer_zero_node);
12659 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12660 gimple_call_set_lhs (call, tmp_var);
12661 gimplify_seq_add_stmt (&body, call);
12662 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
12663 call = gimple_build_call (x, 2,
12664 build_fold_addr_expr (current_function_decl),
12665 tmp_var);
12666 gimplify_seq_add_stmt (&body, call);
12667 gimplify_seq_add_stmt (&body, tf);
12668 new_bind = gimple_build_bind (NULL, body, NULL);
12670 /* Replace the current function body with the body
12671 wrapped in the try/finally TF. */
12672 seq = NULL;
12673 gimple_seq_add_stmt (&seq, new_bind);
12674 gimple_set_body (fndecl, seq);
12675 bind = new_bind;
12678 if ((flag_sanitize & SANITIZE_THREAD) != 0
12679 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
12681 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
12682 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
12683 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
12684 /* Replace the current function body with the body
12685 wrapped in the try/finally TF. */
12686 seq = NULL;
12687 gimple_seq_add_stmt (&seq, new_bind);
12688 gimple_set_body (fndecl, seq);
12691 DECL_SAVED_TREE (fndecl) = NULL_TREE;
12692 cfun->curr_properties |= PROP_gimple_any;
12694 pop_cfun ();
12696 dump_function (TDI_generic, fndecl);
12699 /* Return a dummy expression of type TYPE in order to keep going after an
12700 error. */
12702 static tree
12703 dummy_object (tree type)
12705 tree t = build_int_cst (build_pointer_type (type), 0);
12706 return build2 (MEM_REF, type, t, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; its operand 0 is the va_list expression
   and its type is the requested argument type.  Statements needed
   before the result are appended to PRE_P; POST_P is unused here.

   Returns GS_ERROR on a bad va_list type, GS_ALL_DONE when the
   requested type is invalid for `...' (a trap is emitted instead),
   and GS_OK after lowering to an IFN_VA_ARG internal call.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Static so the "(so you should pass ...)" help is shown at most
	 once per compilation.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* TAG is a null pointer whose type records the requested argument
     type; APTAG records the (unpromoted) va_list type.  Both are
     carried as extra operands of the IFN_VA_ARG call for use when it
     is expanded later.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
12789 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12791 DST/SRC are the destination and source respectively. You can pass
12792 ungimplified trees in DST or SRC, in which case they will be
12793 converted to a gimple operand if necessary.
12795 This function returns the newly created GIMPLE_ASSIGN tuple. */
12797 gimple *
12798 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12800 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12801 gimplify_and_add (t, seq_p);
12802 ggc_free (t);
12803 return gimple_seq_last_stmt (*seq_p);
12806 inline hashval_t
12807 gimplify_hasher::hash (const elt_t *p)
12809 tree t = p->val;
12810 return iterative_hash_expr (t, 0);
12813 inline bool
12814 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12816 tree t1 = p1->val;
12817 tree t2 = p2->val;
12818 enum tree_code code = TREE_CODE (t1);
12820 if (TREE_CODE (t2) != code
12821 || TREE_TYPE (t1) != TREE_TYPE (t2))
12822 return false;
12824 if (!operand_equal_p (t1, t2, 0))
12825 return false;
12827 /* Only allow them to compare equal if they also hash equal; otherwise
12828 results are nondeterminate, and we fail bootstrap comparison. */
12829 gcc_checking_assert (hash (p1) == hash (p2));
12831 return true;