Revert the attempted fix for c++/69855, it breaks bootstrap.
[official-gcc.git] / gcc / gimplify.c
blob8316bb8881f22745b705d6bb9f2e3a1af80d43f4
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-low.h"
55 #include "gimple-low.h"
56 #include "cilk.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
59 #include "gimple-walk.h"
60 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
61 #include "builtins.h"
/* Per-variable flags recorded in a gimplify_omp_ctx's splay tree,
   describing the variable's data-sharing class inside the region.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Mask of the flags that determine a variable's data-sharing class.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OpenMP/OpenACC region the gimplifier is currently inside of.
   Values are bit-encoded so related kinds can be tested with masks
   (e.g. ORT_COMBINED_PARALLEL == ORT_PARALLEL | 1).  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
135 /* Gimplify hashtable helper. */
137 struct gimplify_hasher : free_ptr_hash <elt_t>
139 static inline hashval_t hash (const elt_t *);
140 static inline bool equal (const elt_t *, const elt_t *);
143 struct gimplify_ctx
145 struct gimplify_ctx *prev_context;
147 vec<gbind *> bind_expr_stack;
148 tree temps;
149 gimple_seq conditional_cleanups;
150 tree exit_label;
151 tree return_temp;
153 vec<tree> case_labels;
154 /* The formal temporary table. Should this be persistent? */
155 hash_table<gimplify_hasher> *temp_htab;
157 int conditions;
158 unsigned into_ssa : 1;
159 unsigned allow_rhs_cond_expr : 1;
160 unsigned in_cleanup_point_expr : 1;
161 unsigned keep_stack : 1;
162 unsigned save_stack : 1;
165 struct gimplify_omp_ctx
167 struct gimplify_omp_ctx *outer_context;
168 splay_tree variables;
169 hash_set<tree> *privatized_types;
170 /* Iteration variables in an OMP_FOR. */
171 vec<tree> loop_iter_var;
172 location_t location;
173 enum omp_clause_default_kind default_kind;
174 enum omp_region_type region_type;
175 bool combined_loop;
176 bool distribute;
177 bool target_map_scalars_firstprivate;
178 bool target_map_pointers_as_0len_arrays;
179 bool target_firstprivatize_array_bases;
182 static struct gimplify_ctx *gimplify_ctxp;
183 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
185 /* Forward declaration. */
186 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
187 static hash_map<tree, tree> *oacc_declare_returns;
188 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
189 bool (*) (tree), fallback_t, bool);
191 /* Shorter alias name for the above function for use in gimplify.c
192 only. */
194 static inline void
195 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
197 gimple_seq_add_stmt_without_update (seq_p, gs);
200 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
201 NULL, a new sequence is allocated. This function is
202 similar to gimple_seq_add_seq, but does not scan the operands.
203 During gimplification, we need to manipulate statement sequences
204 before the def/use vectors have been constructed. */
206 static void
207 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
209 gimple_stmt_iterator si;
211 if (src == NULL)
212 return;
214 si = gsi_last (*dst_p);
215 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
219 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
220 and popping gimplify contexts. */
222 static struct gimplify_ctx *ctx_pool = NULL;
224 /* Return a gimplify context struct from the pool. */
226 static inline struct gimplify_ctx *
227 ctx_alloc (void)
229 struct gimplify_ctx * c = ctx_pool;
231 if (c)
232 ctx_pool = c->prev_context;
233 else
234 c = XNEW (struct gimplify_ctx);
236 memset (c, '\0', sizeof (*c));
237 return c;
240 /* Put gimplify context C back into the pool. */
242 static inline void
243 ctx_free (struct gimplify_ctx *c)
245 c->prev_context = ctx_pool;
246 ctx_pool = c;
249 /* Free allocated ctx stack memory. */
251 void
252 free_gimplify_stack (void)
254 struct gimplify_ctx *c;
256 while ((c = ctx_pool))
258 ctx_pool = c->prev_context;
259 free (c);
264 /* Set up a context for the gimplifier. */
266 void
267 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
269 struct gimplify_ctx *c = ctx_alloc ();
271 c->prev_context = gimplify_ctxp;
272 gimplify_ctxp = c;
273 gimplify_ctxp->into_ssa = in_ssa;
274 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
277 /* Tear down a context for the gimplifier. If BODY is non-null, then
278 put the temporaries into the outer BIND_EXPR. Otherwise, put them
279 in the local_decls.
281 BODY is not a sequence, but the first tuple in a sequence. */
283 void
284 pop_gimplify_context (gimple *body)
286 struct gimplify_ctx *c = gimplify_ctxp;
288 gcc_assert (c
289 && (!c->bind_expr_stack.exists ()
290 || c->bind_expr_stack.is_empty ()));
291 c->bind_expr_stack.release ();
292 gimplify_ctxp = c->prev_context;
294 if (body)
295 declare_vars (c->temps, body, false);
296 else
297 record_vars (c->temps);
299 delete c->temp_htab;
300 c->temp_htab = NULL;
301 ctx_free (c);
304 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
306 static void
307 gimple_push_bind_expr (gbind *bind_stmt)
309 gimplify_ctxp->bind_expr_stack.reserve (8);
310 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
313 /* Pop the first element off the stack of bindings. */
315 static void
316 gimple_pop_bind_expr (void)
318 gimplify_ctxp->bind_expr_stack.pop ();
321 /* Return the first element of the stack of bindings. */
323 gbind *
324 gimple_current_bind_expr (void)
326 return gimplify_ctxp->bind_expr_stack.last ();
329 /* Return the stack of bindings created during gimplification. */
331 vec<gbind *>
332 gimple_bind_expr_stack (void)
334 return gimplify_ctxp->bind_expr_stack;
337 /* Return true iff there is a COND_EXPR between us and the innermost
338 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
340 static bool
341 gimple_conditional_context (void)
343 return gimplify_ctxp->conditions > 0;
346 /* Note that we've entered a COND_EXPR. */
348 static void
349 gimple_push_condition (void)
351 #ifdef ENABLE_GIMPLE_CHECKING
352 if (gimplify_ctxp->conditions == 0)
353 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
354 #endif
355 ++(gimplify_ctxp->conditions);
358 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
359 now, add any conditional cleanups we've seen to the prequeue. */
361 static void
362 gimple_pop_condition (gimple_seq *pre_p)
364 int conds = --(gimplify_ctxp->conditions);
366 gcc_assert (conds >= 0);
367 if (conds == 0)
369 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
370 gimplify_ctxp->conditional_cleanups = NULL;
374 /* A stable comparison routine for use with splay trees and DECLs. */
376 static int
377 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
379 tree a = (tree) xa;
380 tree b = (tree) xb;
382 return DECL_UID (a) - DECL_UID (b);
385 /* Create a new omp construct that deals with variable remapping. */
387 static struct gimplify_omp_ctx *
388 new_omp_context (enum omp_region_type region_type)
390 struct gimplify_omp_ctx *c;
392 c = XCNEW (struct gimplify_omp_ctx);
393 c->outer_context = gimplify_omp_ctxp;
394 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
395 c->privatized_types = new hash_set<tree>;
396 c->location = input_location;
397 c->region_type = region_type;
398 if ((region_type & ORT_TASK) == 0)
399 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
400 else
401 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
403 return c;
406 /* Destroy an omp construct that deals with variable remapping. */
408 static void
409 delete_omp_context (struct gimplify_omp_ctx *c)
411 splay_tree_delete (c->variables);
412 delete c->privatized_types;
413 c->loop_iter_var.release ();
414 XDELETE (c);
417 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
418 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
420 /* Both gimplify the statement T and append it to *SEQ_P. This function
421 behaves exactly as gimplify_stmt, but you don't have to pass T as a
422 reference. */
424 void
425 gimplify_and_add (tree t, gimple_seq *seq_p)
427 gimplify_stmt (&t, seq_p);
430 /* Gimplify statement T into sequence *SEQ_P, and return the first
431 tuple in the sequence of generated tuples for this statement.
432 Return NULL if gimplifying T produced no tuples. */
434 static gimple *
435 gimplify_and_return_first (tree t, gimple_seq *seq_p)
437 gimple_stmt_iterator last = gsi_last (*seq_p);
439 gimplify_and_add (t, seq_p);
441 if (!gsi_end_p (last))
443 gsi_next (&last);
444 return gsi_stmt (last);
446 else
447 return gimple_seq_first_stmt (*seq_p);
450 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
451 LHS, or for a call argument. */
453 static bool
454 is_gimple_mem_rhs (tree t)
456 /* If we're dealing with a renamable type, either source or dest must be
457 a renamed variable. */
458 if (is_gimple_reg_type (TREE_TYPE (t)))
459 return is_gimple_val (t);
460 else
461 return is_gimple_val (t) || is_gimple_lvalue (t);
464 /* Return true if T is a CALL_EXPR or an expression that can be
465 assigned to a temporary. Note that this predicate should only be
466 used during gimplification. See the rationale for this in
467 gimplify_modify_expr. */
469 static bool
470 is_gimple_reg_rhs_or_call (tree t)
472 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
473 || TREE_CODE (t) == CALL_EXPR);
476 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
477 this predicate should only be used during gimplification. See the
478 rationale for this in gimplify_modify_expr. */
480 static bool
481 is_gimple_mem_rhs_or_call (tree t)
483 /* If we're dealing with a renamable type, either source or dest must be
484 a renamed variable. */
485 if (is_gimple_reg_type (TREE_TYPE (t)))
486 return is_gimple_val (t);
487 else
488 return (is_gimple_val (t) || is_gimple_lvalue (t)
489 || TREE_CODE (t) == CALL_EXPR);
492 /* Create a temporary with a name derived from VAL. Subroutine of
493 lookup_tmp_var; nobody else should call this function. */
495 static inline tree
496 create_tmp_from_val (tree val)
498 /* Drop all qualifiers and address-space information from the value type. */
499 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
500 tree var = create_tmp_var (type, get_name (val));
501 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
502 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
503 DECL_GIMPLE_REG_P (var) = 1;
504 return var;
507 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
508 an existing expression temporary. */
510 static tree
511 lookup_tmp_var (tree val, bool is_formal)
513 tree ret;
515 /* If not optimizing, never really reuse a temporary. local-alloc
516 won't allocate any variable that is used in more than one basic
517 block, which means it will go into memory, causing much extra
518 work in reload and final and poorer code generation, outweighing
519 the extra memory allocation here. */
520 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
521 ret = create_tmp_from_val (val);
522 else
524 elt_t elt, *elt_p;
525 elt_t **slot;
527 elt.val = val;
528 if (!gimplify_ctxp->temp_htab)
529 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
530 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
531 if (*slot == NULL)
533 elt_p = XNEW (elt_t);
534 elt_p->val = val;
535 elt_p->temp = ret = create_tmp_from_val (val);
536 *slot = elt_p;
538 else
540 elt_p = *slot;
541 ret = elt_p->temp;
545 return ret;
548 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
550 static tree
551 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
552 bool is_formal, bool allow_ssa)
554 tree t, mod;
556 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
557 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
558 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
559 fb_rvalue);
561 if (allow_ssa
562 && gimplify_ctxp->into_ssa
563 && is_gimple_reg_type (TREE_TYPE (val)))
565 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
566 if (! gimple_in_ssa_p (cfun))
568 const char *name = get_name (val);
569 if (name)
570 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
573 else
574 t = lookup_tmp_var (val, is_formal);
576 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
578 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
580 /* gimplify_modify_expr might want to reduce this further. */
581 gimplify_and_add (mod, pre_p);
582 ggc_free (mod);
584 return t;
587 /* Return a formal temporary variable initialized with VAL. PRE_P is as
588 in gimplify_expr. Only use this function if:
590 1) The value of the unfactored expression represented by VAL will not
591 change between the initialization and use of the temporary, and
592 2) The temporary will not be otherwise modified.
594 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
595 and #2 means it is inappropriate for && temps.
597 For other cases, use get_initialized_tmp_var instead. */
599 tree
600 get_formal_tmp_var (tree val, gimple_seq *pre_p)
602 return internal_get_tmp_var (val, pre_p, NULL, true, true);
605 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
606 are as in gimplify_expr. */
608 tree
609 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
610 bool allow_ssa)
612 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
615 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
616 generate debug info for them; otherwise don't. */
618 void
619 declare_vars (tree vars, gimple *gs, bool debug_info)
621 tree last = vars;
622 if (last)
624 tree temps, block;
626 gbind *scope = as_a <gbind *> (gs);
628 temps = nreverse (last);
630 block = gimple_bind_block (scope);
631 gcc_assert (!block || TREE_CODE (block) == BLOCK);
632 if (!block || !debug_info)
634 DECL_CHAIN (last) = gimple_bind_vars (scope);
635 gimple_bind_set_vars (scope, temps);
637 else
639 /* We need to attach the nodes both to the BIND_EXPR and to its
640 associated BLOCK for debugging purposes. The key point here
641 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
642 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
643 if (BLOCK_VARS (block))
644 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
645 else
647 gimple_bind_set_vars (scope,
648 chainon (gimple_bind_vars (scope), temps));
649 BLOCK_VARS (block) = temps;
655 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
656 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
657 no such upper bound can be obtained. */
659 static void
660 force_constant_size (tree var)
662 /* The only attempt we make is by querying the maximum size of objects
663 of the variable's type. */
665 HOST_WIDE_INT max_size;
667 gcc_assert (TREE_CODE (var) == VAR_DECL);
669 max_size = max_int_size_in_bytes (TREE_TYPE (var));
671 gcc_assert (max_size >= 0);
673 DECL_SIZE_UNIT (var)
674 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
675 DECL_SIZE (var)
676 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
679 /* Push the temporary variable TMP into the current binding. */
681 void
682 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
684 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
686 /* Later processing assumes that the object size is constant, which might
687 not be true at this point. Force the use of a constant upper bound in
688 this case. */
689 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
690 force_constant_size (tmp);
692 DECL_CONTEXT (tmp) = fn->decl;
693 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
695 record_vars_into (tmp, fn->decl);
698 /* Push the temporary variable TMP into the current binding. */
700 void
701 gimple_add_tmp_var (tree tmp)
703 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
705 /* Later processing assumes that the object size is constant, which might
706 not be true at this point. Force the use of a constant upper bound in
707 this case. */
708 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
709 force_constant_size (tmp);
711 DECL_CONTEXT (tmp) = current_function_decl;
712 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
714 if (gimplify_ctxp)
716 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
717 gimplify_ctxp->temps = tmp;
719 /* Mark temporaries local within the nearest enclosing parallel. */
720 if (gimplify_omp_ctxp)
722 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
723 while (ctx
724 && (ctx->region_type == ORT_WORKSHARE
725 || ctx->region_type == ORT_SIMD
726 || ctx->region_type == ORT_ACC))
727 ctx = ctx->outer_context;
728 if (ctx)
729 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
732 else if (cfun)
733 record_vars (tmp);
734 else
736 gimple_seq body_seq;
738 /* This case is for nested functions. We need to expose the locals
739 they create. */
740 body_seq = gimple_body (current_function_decl);
741 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

     1. Manual unsharing.  The front-end needs to call unshare_expr on every
        expression that might end up being shared across functions.

     2. Deep unsharing.  This is an extension of regular unsharing.  Instead
        of calling unshare_expr on expressions that might be shared across
        functions, the front-end pre-marks them with TREE_VISITED.  This will
        ensure that they are unshared on the first reference within functions
        when the regular unsharing algorithm runs.  The counterpart is that
        this algorithm must look deeper than for manual unsharing, which is
        specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
783 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
784 These nodes model computations that must be done once. If we were to
785 unshare something like SAVE_EXPR(i++), the gimplification process would
786 create wrong code. However, if DATA is non-null, it must hold a pointer
787 set that is used to unshare the subtrees of these nodes. */
789 static tree
790 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
792 tree t = *tp;
793 enum tree_code code = TREE_CODE (t);
795 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
796 copy their subtrees if we can make sure to do it only once. */
797 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
799 if (data && !((hash_set<tree> *)data)->add (t))
801 else
802 *walk_subtrees = 0;
805 /* Stop at types, decls, constants like copy_tree_r. */
806 else if (TREE_CODE_CLASS (code) == tcc_type
807 || TREE_CODE_CLASS (code) == tcc_declaration
808 || TREE_CODE_CLASS (code) == tcc_constant
809 /* We can't do anything sensible with a BLOCK used as an
810 expression, but we also can't just die when we see it
811 because of non-expression uses. So we avert our eyes
812 and cross our fingers. Silly Java. */
813 || code == BLOCK)
814 *walk_subtrees = 0;
816 /* Cope with the statement expression extension. */
817 else if (code == STATEMENT_LIST)
820 /* Leave the bulk of the work to copy_tree_r itself. */
821 else
822 copy_tree_r (tp, walk_subtrees, NULL);
824 return NULL_TREE;
827 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
828 If *TP has been visited already, then *TP is deeply copied by calling
829 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
831 static tree
832 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
834 tree t = *tp;
835 enum tree_code code = TREE_CODE (t);
837 /* Skip types, decls, and constants. But we do want to look at their
838 types and the bounds of types. Mark them as visited so we properly
839 unmark their subtrees on the unmark pass. If we've already seen them,
840 don't look down further. */
841 if (TREE_CODE_CLASS (code) == tcc_type
842 || TREE_CODE_CLASS (code) == tcc_declaration
843 || TREE_CODE_CLASS (code) == tcc_constant)
845 if (TREE_VISITED (t))
846 *walk_subtrees = 0;
847 else
848 TREE_VISITED (t) = 1;
851 /* If this node has been visited already, unshare it and don't look
852 any deeper. */
853 else if (TREE_VISITED (t))
855 walk_tree (tp, mostly_copy_tree_r, data, NULL);
856 *walk_subtrees = 0;
859 /* Otherwise, mark the node as visited and keep looking. */
860 else
861 TREE_VISITED (t) = 1;
863 return NULL_TREE;
866 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
867 copy_if_shared_r callback unmodified. */
869 static inline void
870 copy_if_shared (tree *tp, void *data)
872 walk_tree (tp, copy_if_shared_r, data, NULL);
875 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
876 any nested functions. */
878 static void
879 unshare_body (tree fndecl)
881 struct cgraph_node *cgn = cgraph_node::get (fndecl);
882 /* If the language requires deep unsharing, we need a pointer set to make
883 sure we don't repeatedly unshare subtrees of unshareable nodes. */
884 hash_set<tree> *visited
885 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
887 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
888 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
889 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
891 delete visited;
893 if (cgn)
894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
895 unshare_body (cgn->decl);
898 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
899 Subtrees are walked until the first unvisited node is encountered. */
901 static tree
902 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
904 tree t = *tp;
906 /* If this node has been visited, unmark it and keep looking. */
907 if (TREE_VISITED (t))
908 TREE_VISITED (t) = 0;
910 /* Otherwise, don't look any deeper. */
911 else
912 *walk_subtrees = 0;
914 return NULL_TREE;
917 /* Unmark the visited trees rooted at *TP. */
919 static inline void
920 unmark_visited (tree *tp)
922 walk_tree (tp, unmark_visited_r, NULL, NULL);
925 /* Likewise, but mark all trees as not visited. */
927 static void
928 unvisit_body (tree fndecl)
930 struct cgraph_node *cgn = cgraph_node::get (fndecl);
932 unmark_visited (&DECL_SAVED_TREE (fndecl));
933 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
934 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
936 if (cgn)
937 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
938 unvisit_body (cgn->decl);
941 /* Unconditionally make an unshared copy of EXPR. This is used when using
942 stored expressions which span multiple functions, such as BINFO_VTABLE,
943 as the normal unsharing process can't tell that they're shared. */
945 tree
946 unshare_expr (tree expr)
948 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
949 return expr;
952 /* Worker for unshare_expr_without_location. */
954 static tree
955 prune_expr_location (tree *tp, int *walk_subtrees, void *)
957 if (EXPR_P (*tp))
958 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
959 else
960 *walk_subtrees = 0;
961 return NULL_TREE;
964 /* Similar to unshare_expr but also prune all expression locations
965 from EXPR. */
967 tree
968 unshare_expr_without_location (tree expr)
970 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
971 if (EXPR_P (expr))
972 walk_tree (&expr, prune_expr_location, NULL, NULL);
973 return expr;
976 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
977 contain statements and have a value. Assign its value to a temporary
978 and give it void_type_node. Return the temporary, or NULL_TREE if
979 WRAPPER was already void. */
981 tree
982 voidify_wrapper_expr (tree wrapper, tree temp)
984 tree type = TREE_TYPE (wrapper);
985 if (type && !VOID_TYPE_P (type))
987 tree *p;
989 /* Set p to point to the body of the wrapper. Loop until we find
990 something that isn't a wrapper. */
991 for (p = &wrapper; p && *p; )
993 switch (TREE_CODE (*p))
995 case BIND_EXPR:
996 TREE_SIDE_EFFECTS (*p) = 1;
997 TREE_TYPE (*p) = void_type_node;
998 /* For a BIND_EXPR, the body is operand 1. */
999 p = &BIND_EXPR_BODY (*p);
1000 break;
1002 case CLEANUP_POINT_EXPR:
1003 case TRY_FINALLY_EXPR:
1004 case TRY_CATCH_EXPR:
1005 TREE_SIDE_EFFECTS (*p) = 1;
1006 TREE_TYPE (*p) = void_type_node;
1007 p = &TREE_OPERAND (*p, 0);
1008 break;
1010 case STATEMENT_LIST:
1012 tree_stmt_iterator i = tsi_last (*p);
1013 TREE_SIDE_EFFECTS (*p) = 1;
1014 TREE_TYPE (*p) = void_type_node;
1015 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1017 break;
1019 case COMPOUND_EXPR:
1020 /* Advance to the last statement. Set all container types to
1021 void. */
1022 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1024 TREE_SIDE_EFFECTS (*p) = 1;
1025 TREE_TYPE (*p) = void_type_node;
1027 break;
1029 case TRANSACTION_EXPR:
1030 TREE_SIDE_EFFECTS (*p) = 1;
1031 TREE_TYPE (*p) = void_type_node;
1032 p = &TRANSACTION_EXPR_BODY (*p);
1033 break;
1035 default:
1036 /* Assume that any tree upon which voidify_wrapper_expr is
1037 directly called is a wrapper, and that its body is op0. */
1038 if (p == &wrapper)
1040 TREE_SIDE_EFFECTS (*p) = 1;
1041 TREE_TYPE (*p) = void_type_node;
1042 p = &TREE_OPERAND (*p, 0);
1043 break;
1045 goto out;
1049 out:
1050 if (p == NULL || IS_EMPTY_STMT (*p))
1051 temp = NULL_TREE;
1052 else if (temp)
1054 /* The wrapper is on the RHS of an assignment that we're pushing
1055 down. */
1056 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1057 || TREE_CODE (temp) == MODIFY_EXPR);
1058 TREE_OPERAND (temp, 1) = *p;
1059 *p = temp;
1061 else
1063 temp = create_tmp_var (type, "retval");
1064 *p = build2 (INIT_EXPR, type, temp, *p);
1067 return temp;
1070 return NULL_TREE;
1073 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1074 a temporary through which they communicate. */
1076 static void
1077 build_stack_save_restore (gcall **save, gcall **restore)
1079 tree tmp_var;
1081 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1082 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1083 gimple_call_set_lhs (*save, tmp_var);
1085 *restore
1086 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1087 1, tmp_var);
1090 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
1092 static enum gimplify_status
1093 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1095 tree bind_expr = *expr_p;
1096 bool old_keep_stack = gimplify_ctxp->keep_stack;
1097 bool old_save_stack = gimplify_ctxp->save_stack;
1098 tree t;
1099 gbind *bind_stmt;
1100 gimple_seq body, cleanup;
1101 gcall *stack_save;
1102 location_t start_locus = 0, end_locus = 0;
1103 tree ret_clauses = NULL;
/* If the BIND_EXPR computes a value, voidify_wrapper_expr hands back a
   temporary that receives it; *expr_p is replaced with it at the end.  */
1105 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1107 /* Mark variables seen in this bind expr.  */
1108 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1110 if (TREE_CODE (t) == VAR_DECL)
1112 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1114 /* Mark variable as local.  */
1115 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
1116 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1117 || splay_tree_lookup (ctx->variables,
1118 (splay_tree_key) t) == NULL))
/* Addressable non-static locals inside a SIMD region are privatized;
   everything else is just recorded as scope-local.  */
1120 if (ctx->region_type == ORT_SIMD
1121 && TREE_ADDRESSABLE (t)
1122 && !TREE_STATIC (t))
1123 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1124 else
1125 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1128 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1130 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1131 cfun->has_local_explicit_reg_vars = true;
1134 /* Preliminarily mark non-addressed complex variables as eligible
1135 for promotion to gimple registers.  We'll transform their uses
1136 as we find them.  */
1137 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1138 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1139 && !TREE_THIS_VOLATILE (t)
1140 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1141 && !needs_to_live_in_memory (t))
1142 DECL_GIMPLE_REG_P (t) = 1;
1145 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1146 BIND_EXPR_BLOCK (bind_expr));
1147 gimple_push_bind_expr (bind_stmt);
/* Track this scope's save/keep-stack needs independently of the
   enclosing scope; the saved values are restored below.  */
1149 gimplify_ctxp->keep_stack = false;
1150 gimplify_ctxp->save_stack = false;
1152 /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1153 body = NULL;
1154 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1155 gimple_bind_set_body (bind_stmt, body);
1157 /* Source location wise, the cleanup code (stack_restore and clobbers)
1158 belongs to the end of the block, so propagate what we have.  The
1159 stack_save operation belongs to the beginning of block, which we can
1160 infer from the bind_expr directly if the block has no explicit
1161 assignment.  */
1162 if (BIND_EXPR_BLOCK (bind_expr))
1164 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1165 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1167 if (start_locus == 0)
1168 start_locus = EXPR_LOCATION (bind_expr);
1170 cleanup = NULL;
1171 stack_save = NULL;
1173 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1174 the stack space allocated to the VLAs.  */
1175 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1177 gcall *stack_restore;
1179 /* Save stack on entry and restore it on exit.  Add a try_finally
1180 block to achieve this.  */
1181 build_stack_save_restore (&stack_save, &stack_restore);
1183 gimple_set_location (stack_save, start_locus);
1184 gimple_set_location (stack_restore, end_locus);
1186 gimplify_seq_add_stmt (&cleanup, stack_restore);
1189 /* Add clobbers for all variables that go out of scope.  */
1190 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1192 if (TREE_CODE (t) == VAR_DECL
1193 && !is_global_var (t)
1194 && DECL_CONTEXT (t) == current_function_decl
1195 && !DECL_HARD_REGISTER (t)
1196 && !TREE_THIS_VOLATILE (t)
1197 && !DECL_HAS_VALUE_EXPR_P (t)
1198 /* Only care for variables that have to be in memory.  Others
1199 will be rewritten into SSA names, hence moved to the top-level.  */
1200 && !is_gimple_reg (t)
1201 && flag_stack_reuse != SR_NONE)
/* A clobber is an empty CONSTRUCTOR with TREE_THIS_VOLATILE set,
   assigned to the dying variable; it tells later passes the stack
   slot may be reused.  */
1203 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1204 gimple *clobber_stmt;
1205 TREE_THIS_VOLATILE (clobber) = 1;
1206 clobber_stmt = gimple_build_assign (t, clobber);
1207 gimple_set_location (clobber_stmt, end_locus);
1208 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
/* Collect deferred OpenACC 'declare' clauses for this variable so
   a single OACC_DECLARE region can be emitted at scope exit.  */
1210 if (flag_openacc && oacc_declare_returns != NULL)
1212 tree *c = oacc_declare_returns->get (t);
1213 if (c != NULL)
1215 if (ret_clauses)
1216 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1218 ret_clauses = *c;
1220 oacc_declare_returns->remove (t);
1222 if (oacc_declare_returns->elements () == 0)
1224 delete oacc_declare_returns;
1225 oacc_declare_returns = NULL;
1232 if (ret_clauses)
1234 gomp_target *stmt;
1235 gimple_stmt_iterator si = gsi_start (cleanup);
1237 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1238 ret_clauses);
/* The OACC_DECLARE must run before the stack restore/clobbers.  */
1239 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
/* If any cleanup is needed, wrap the body in a TRY_FINALLY so the
   cleanup runs on every exit path out of the scope.  */
1242 if (cleanup)
1244 gtry *gs;
1245 gimple_seq new_body;
1247 new_body = NULL;
1248 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1249 GIMPLE_TRY_FINALLY);
1251 if (stack_save)
1252 gimplify_seq_add_stmt (&new_body, stack_save);
1253 gimplify_seq_add_stmt (&new_body, gs);
1254 gimple_bind_set_body (bind_stmt, new_body);
1257 /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
1258 if (!gimplify_ctxp->keep_stack)
1259 gimplify_ctxp->keep_stack = old_keep_stack;
1260 gimplify_ctxp->save_stack = old_save_stack;
1262 gimple_pop_bind_expr ();
1264 gimplify_seq_add_stmt (pre_p, bind_stmt);
/* If the BIND_EXPR had a value, it now lives in TEMP.  */
1266 if (temp)
1268 *expr_p = temp;
1269 return GS_OK;
1272 *expr_p = NULL_TREE;
1273 return GS_ALL_DONE;
1276 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1277 GIMPLE value, it is assigned to a new temporary and the statement is
1278 re-written to return the temporary.
1280 PRE_P points to the sequence where side effects that must happen before
1281 STMT should be stored.  */
1283 static enum gimplify_status
1284 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1286 greturn *ret;
1287 tree ret_expr = TREE_OPERAND (stmt, 0);
1288 tree result_decl, result;
1290 if (ret_expr == error_mark_node)
1291 return GS_ERROR;
1293 /* Implicit _Cilk_sync must be inserted right before any return statement
1294 if there is a _Cilk_spawn in the function.  If the user has provided a
1295 _Cilk_sync, the optimizer should remove this duplicate one.  */
1296 if (fn_contains_cilk_spawn_p (cfun))
1298 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1299 gimplify_and_add (impl_sync, pre_p);
/* Trivial returns need no temporary.  NOTE(review): the
   error_mark_node test here is redundant — it already returned
   GS_ERROR above — but harmless.  */
1302 if (!ret_expr
1303 || TREE_CODE (ret_expr) == RESULT_DECL
1304 || ret_expr == error_mark_node)
/* NOTE(review): this inner 'ret' shadows the outer declaration.  */
1306 greturn *ret = gimple_build_return (ret_expr);
1307 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1308 gimplify_seq_add_stmt (pre_p, ret);
1309 return GS_ALL_DONE;
1312 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1313 result_decl = NULL_TREE;
1314 else
/* ret_expr is a MODIFY/INIT_EXPR whose LHS is the RESULT_DECL.  */
1316 result_decl = TREE_OPERAND (ret_expr, 0);
1318 /* See through a return by reference.  */
1319 if (TREE_CODE (result_decl) == INDIRECT_REF)
1320 result_decl = TREE_OPERAND (result_decl, 0);
1322 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1323 || TREE_CODE (ret_expr) == INIT_EXPR)
1324 && TREE_CODE (result_decl) == RESULT_DECL);
1327 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1328 Recall that aggregate_value_p is FALSE for any aggregate type that is
1329 returned in registers.  If we're returning values in registers, then
1330 we don't want to extend the lifetime of the RESULT_DECL, particularly
1331 across another call.  In addition, for those aggregates for which
1332 hard_function_value generates a PARALLEL, we'll die during normal
1333 expansion of structure assignments; there's special code in expand_return
1334 to handle this case that does not exist in expand_expr.  */
1335 if (!result_decl)
1336 result = NULL_TREE;
1337 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1339 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1341 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1342 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1343 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1344 should be effectively allocated by the caller, i.e. all calls to
1345 this function must be subject to the Return Slot Optimization.  */
1346 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1347 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1349 result = result_decl;
/* Otherwise reuse one return temporary for every return statement
   in the function; it is created on first use below.  */
1351 else if (gimplify_ctxp->return_temp)
1352 result = gimplify_ctxp->return_temp;
1353 else
1355 result = create_tmp_reg (TREE_TYPE (result_decl));
1357 /* ??? With complex control flow (usually involving abnormal edges),
1358 we can wind up warning about an uninitialized value for this.  Due
1359 to how this variable is constructed and initialized, this is never
1360 true.  Give up and never warn.  */
1361 TREE_NO_WARNING (result) = 1;
1363 gimplify_ctxp->return_temp = result;
1366 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1367 Then gimplify the whole thing.  */
1368 if (result != result_decl)
1369 TREE_OPERAND (ret_expr, 0) = result;
1371 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1373 ret = gimple_build_return (result);
1374 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1375 gimplify_seq_add_stmt (pre_p, ret);
1377 return GS_ALL_DONE;
1380 /* Gimplify a variable-length array DECL.  */
1382 static void
1383 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1385 /* This is a variable-sized decl.  Simplify its size and mark it
1386 for deferred expansion.  */
1387 tree t, addr, ptr_type;
1389 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1390 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1392 /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
1393 if (DECL_HAS_VALUE_EXPR_P (decl))
1394 return;
1396 /* All occurrences of this decl in final gimplified code will be
1397 replaced by indirection.  Setting DECL_VALUE_EXPR does two
1398 things: First, it lets the rest of the gimplifier know what
1399 replacement to use.  Second, it lets the debug info know
1400 where to find the value.  */
1401 ptr_type = build_pointer_type (TREE_TYPE (decl));
1402 addr = create_tmp_var (ptr_type, get_name (decl));
/* Keep ADDR visible in debug info so the VLA's location survives.  */
1403 DECL_IGNORED_P (addr) = 0;
1404 t = build_fold_indirect_ref (addr);
/* Dereferencing ADDR cannot trap: it always points at the alloca'd
   storage created below.  */
1405 TREE_THIS_NOTRAP (t) = 1;
1406 SET_DECL_VALUE_EXPR (decl, t);
1407 DECL_HAS_VALUE_EXPR_P (decl) = 1;
/* Allocate the storage: addr = __builtin_alloca_with_align (size, align).  */
1409 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1410 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1411 size_int (DECL_ALIGN (decl)));
1412 /* The call has been built for a variable-sized object.  */
1413 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1414 t = fold_convert (ptr_type, t);
1415 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1417 gimplify_and_add (t, seq_p);
1420 /* A helper function to be called via walk_tree. Mark all labels under *TP
1421 as being forced. To be called for DECL_INITIAL of static variables. */
1423 static tree
1424 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1426 if (TYPE_P (*tp))
1427 *walk_subtrees = 0;
1428 if (TREE_CODE (*tp) == LABEL_DECL)
1430 FORCED_LABEL (*tp) = 1;
1431 cfun->has_forced_label_in_static = 1;
1434 return NULL_TREE;
1437 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1438 and initialization explicit.  */
1440 static enum gimplify_status
1441 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1443 tree stmt = *stmt_p;
1444 tree decl = DECL_EXPR_DECL (stmt);
1446 *stmt_p = NULL_TREE;
1448 if (TREE_TYPE (decl) == error_mark_node)
1449 return GS_ERROR;
1451 if ((TREE_CODE (decl) == TYPE_DECL
1452 || TREE_CODE (decl) == VAR_DECL)
1453 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1455 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1456 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1457 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1460 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1461 in case its size expressions contain problematic nodes like CALL_EXPR.  */
1462 if (TREE_CODE (decl) == TYPE_DECL
1463 && DECL_ORIGINAL_TYPE (decl)
1464 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1466 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1467 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1468 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1471 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1473 tree init = DECL_INITIAL (decl);
/* Variable-sized decls — and, under generic stack checking, decls
   larger than STACK_CHECK_MAX_VAR_SIZE — get the VLA treatment:
   explicit alloca plus a DECL_VALUE_EXPR indirection.  */
1475 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1476 || (!TREE_STATIC (decl)
1477 && flag_stack_check == GENERIC_STACK_CHECK
1478 && compare_tree_int (DECL_SIZE_UNIT (decl),
1479 STACK_CHECK_MAX_VAR_SIZE) > 0))
1480 gimplify_vla_decl (decl, seq_p);
1482 /* Some front ends do not explicitly declare all anonymous
1483 artificial variables.  We compensate here by declaring the
1484 variables, though it would be better if the front ends would
1485 explicitly declare them.  */
1486 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1487 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1488 gimple_add_tmp_var (decl);
1490 if (init && init != error_mark_node)
1492 if (!TREE_STATIC (decl))
/* Turn the initializer into an explicit INIT_EXPR assignment; the
   original initializer node is freed once gimplified.  */
1494 DECL_INITIAL (decl) = NULL_TREE;
1495 init = build2 (INIT_EXPR, void_type_node, decl, init);
1496 gimplify_and_add (init, seq_p);
1497 ggc_free (init);
1499 else
1500 /* We must still examine initializers for static variables
1501 as they may contain a label address.  */
1502 walk_tree (&init, force_labels_r, NULL, NULL);
1506 return GS_ALL_DONE;
1509 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1510 and replacing the LOOP_EXPR with goto, but if the loop contains an
1511 EXIT_EXPR, we need to append a label for it to jump to. */
1513 static enum gimplify_status
1514 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1516 tree saved_label = gimplify_ctxp->exit_label;
1517 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1519 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1521 gimplify_ctxp->exit_label = NULL_TREE;
1523 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1525 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1527 if (gimplify_ctxp->exit_label)
1528 gimplify_seq_add_stmt (pre_p,
1529 gimple_build_label (gimplify_ctxp->exit_label));
1531 gimplify_ctxp->exit_label = saved_label;
1533 *expr_p = NULL;
1534 return GS_ALL_DONE;
1537 /* Gimplify a statement list onto a sequence. These may be created either
1538 by an enlightened front-end, or by shortcut_cond_expr. */
1540 static enum gimplify_status
1541 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1543 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1545 tree_stmt_iterator i = tsi_start (*expr_p);
1547 while (!tsi_end_p (i))
1549 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1550 tsi_delink (&i);
1553 if (temp)
1555 *expr_p = temp;
1556 return GS_OK;
1559 return GS_ALL_DONE;
1563 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1564 branch to.  */
1566 static enum gimplify_status
1567 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1569 tree switch_expr = *expr_p;
1570 gimple_seq switch_body_seq = NULL;
1571 enum gimplify_status ret;
1572 tree index_type = TREE_TYPE (switch_expr);
1573 if (index_type == NULL_TREE)
1574 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
/* The controlling expression must become a GIMPLE value first.  */
1576 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1577 fb_rvalue);
1578 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1579 return ret;
1581 if (SWITCH_BODY (switch_expr))
1583 vec<tree> labels;
1584 vec<tree> saved_labels;
1585 tree default_case = NULL_TREE;
1586 gswitch *switch_stmt;
1588 /* If someone can be bothered to fill in the labels, they can
1589 be bothered to null out the body too.  */
1590 gcc_assert (!SWITCH_LABELS (switch_expr));
1592 /* Save old labels, get new ones from body, then restore the old
1593 labels.  Save all the things from the switch body to append after.  */
1594 saved_labels = gimplify_ctxp->case_labels;
1595 gimplify_ctxp->case_labels.create (8);
/* Gimplifying the body pushes each CASE_LABEL_EXPR it meets onto
   gimplify_ctxp->case_labels (see gimplify_case_label_expr).  */
1597 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1599 /* Possibly warn about unreachable statements between switch's
1600 controlling expression and the first case.  */
1601 if (warn_switch_unreachable
1602 /* This warning doesn't play well with Fortran when optimizations
1603 are on.  */
1604 && !lang_GNU_Fortran ()
1605 && switch_body_seq != NULL)
1607 gimple_seq seq = switch_body_seq;
1608 /* Look into the innermost lexical scope.  */
1609 while (gimple_code (seq) == GIMPLE_BIND)
1610 seq = gimple_bind_body (as_a <gbind *> (seq));
1611 gimple *stmt = gimple_seq_first_stmt (seq);
1612 enum gimple_code code = gimple_code (stmt);
1613 if (code != GIMPLE_LABEL && code != GIMPLE_TRY)
1615 if (code == GIMPLE_GOTO
1616 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1617 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1618 /* Don't warn for compiler-generated gotos.  These occur
1619 in Duff's devices, for example.  */;
1620 else
1621 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1622 "statement will never be executed");
1625 labels = gimplify_ctxp->case_labels;
1626 gimplify_ctxp->case_labels = saved_labels;
/* Sort/deduplicate the labels and split out the default case.  */
1628 preprocess_case_label_vec_for_gimple (labels, index_type,
1629 &default_case);
/* Every GIMPLE_SWITCH needs a default; synthesize one that falls
   out of the switch (its label is appended after the body).  */
1631 if (!default_case)
1633 glabel *new_default;
1635 default_case
1636 = build_case_label (NULL_TREE, NULL_TREE,
1637 create_artificial_label (UNKNOWN_LOCATION));
1638 new_default = gimple_build_label (CASE_LABEL (default_case));
1639 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1642 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
1643 default_case, labels);
1644 gimplify_seq_add_stmt (pre_p, switch_stmt);
1645 gimplify_seq_add_seq (pre_p, switch_body_seq);
1646 labels.release ();
1648 else
1649 gcc_assert (SWITCH_LABELS (switch_expr));
1651 return GS_ALL_DONE;
1654 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
1656 static enum gimplify_status
1657 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1659 struct gimplify_ctx *ctxp;
1660 glabel *label_stmt;
1662 /* Invalid programs can play Duff's Device type games with, for example,
1663 #pragma omp parallel.  At least in the C front end, we don't
1664 detect such invalid branches until after gimplification, in the
1665 diagnose_omp_blocks pass.  */
/* Walk outward to the nearest context that is collecting case labels,
   i.e. the enclosing gimplify_switch_expr invocation.  */
1666 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1667 if (ctxp->case_labels.exists ())
1668 break;
/* Emit the label and record it for the switch being built.  */
1670 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1671 ctxp->case_labels.safe_push (*expr_p);
1672 gimplify_seq_add_stmt (pre_p, label_stmt);
1674 return GS_ALL_DONE;
1677 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1678 if necessary. */
1680 tree
1681 build_and_jump (tree *label_p)
1683 if (label_p == NULL)
1684 /* If there's nowhere to jump, just fall through. */
1685 return NULL_TREE;
1687 if (*label_p == NULL_TREE)
1689 tree label = create_artificial_label (UNKNOWN_LOCATION);
1690 *label_p = label;
1693 return build1 (GOTO_EXPR, void_type_node, *label_p);
1696 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1697 This also involves building a label to jump to and communicating it to
1698 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1700 static enum gimplify_status
1701 gimplify_exit_expr (tree *expr_p)
1703 tree cond = TREE_OPERAND (*expr_p, 0);
1704 tree expr;
1706 expr = build_and_jump (&gimplify_ctxp->exit_label);
1707 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1708 *expr_p = expr;
1710 return GS_OK;
1713 /* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
1714 different from its canonical type, wrap the whole thing inside a
1715 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1716 type.
1718 The canonical type of a COMPONENT_REF is the type of the field being
1719 referenced--unless the field is a bit-field which can be read directly
1720 in a smaller mode, in which case the canonical type is the
1721 sign-appropriate type corresponding to that mode.  */
1723 static void
1724 canonicalize_component_ref (tree *expr_p)
1726 tree expr = *expr_p;
1727 tree type;
1729 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
/* For integral accesses, get_unwidened yields the narrowest type the
   (possibly bit-field) read can use; otherwise use the field's type.  */
1731 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1732 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1733 else
1734 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1736 /* One could argue that all the stuff below is not necessary for
1737 the non-bitfield case and declare it a FE error if type
1738 adjustment would be needed.  */
1739 if (TREE_TYPE (expr) != type)
1741 #ifdef ENABLE_TYPES_CHECKING
1742 tree old_type = TREE_TYPE (expr);
1743 #endif
1744 int type_quals;
1746 /* We need to preserve qualifiers and propagate them from
1747 operand 0.  */
1748 type_quals = TYPE_QUALS (type)
1749 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1750 if (TYPE_QUALS (type) != type_quals)
1751 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1753 /* Set the type of the COMPONENT_REF to the underlying type.  */
1754 TREE_TYPE (expr) = type;
1756 #ifdef ENABLE_TYPES_CHECKING
1757 /* It is now a FE error, if the conversion from the canonical
1758 type to the original expression type is not useless.  */
1759 gcc_assert (useless_type_conversion_p (old_type, type));
1760 #endif
1764 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1765 to foo, embed that change in the ADDR_EXPR by converting
1766 T array[U];
1767 (T *)&array
1769 &array[L]
1770 where L is the lower bound.  For simplicity, only do this for constant
1771 lower bound.
1772 The constraint is that the type of &array[L] is trivially convertible
1773 to T *.  */
1775 static void
1776 canonicalize_addr_expr (tree *expr_p)
1778 tree expr = *expr_p;
1779 tree addr_expr = TREE_OPERAND (expr, 0);
1780 tree datype, ddatype, pddatype;
1782 /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
1783 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1784 || TREE_CODE (addr_expr) != ADDR_EXPR)
1785 return;
1787 /* The addr_expr type should be a pointer to an array.  */
1788 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1789 if (TREE_CODE (datype) != ARRAY_TYPE)
1790 return;
1792 /* The pointer to element type shall be trivially convertible to
1793 the expression pointer type.  */
1794 ddatype = TREE_TYPE (datype);
1795 pddatype = build_pointer_type (ddatype);
1796 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1797 pddatype))
1798 return;
1800 /* The lower bound and element sizes must be constant.  */
1801 if (!TYPE_SIZE_UNIT (ddatype)
1802 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1803 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1804 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1805 return;
1807 /* All checks succeeded.  Build a new node to merge the cast.  */
/* Build &array[L]: an ARRAY_REF at the domain's minimum index,
   wrapped in an ADDR_EXPR of pointer-to-element type.  */
1808 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1809 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1810 NULL_TREE, NULL_TREE);
1811 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1813 /* We can have stripped a required restrict qualifier above.  */
1814 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1815 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1818 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
1819 underneath as appropriate.  */
1821 static enum gimplify_status
1822 gimplify_conversion (tree *expr_p)
1824 location_t loc = EXPR_LOCATION (*expr_p);
1825 gcc_assert (CONVERT_EXPR_P (*expr_p));
1827 /* Then strip away all but the outermost conversion.  */
1828 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1830 /* And remove the outermost conversion if it's useless.  */
1831 if (tree_ssa_useless_type_conversion (*expr_p))
1832 *expr_p = TREE_OPERAND (*expr_p, 0);
1834 /* If we still have a conversion at the toplevel,
1835 then canonicalize some constructs.  */
1836 if (CONVERT_EXPR_P (*expr_p))
1838 tree sub = TREE_OPERAND (*expr_p, 0);
1840 /* If a NOP conversion is changing the type of a COMPONENT_REF
1841 expression, then canonicalize its type now in order to expose more
1842 redundant conversions.  */
1843 if (TREE_CODE (sub) == COMPONENT_REF)
1844 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1846 /* If a NOP conversion is changing a pointer to array of foo
1847 to a pointer to foo, embed that change in the ADDR_EXPR.  */
1848 else if (TREE_CODE (sub) == ADDR_EXPR)
1849 canonicalize_addr_expr (expr_p);
1852 /* If we have a conversion to a non-register type force the
1853 use of a VIEW_CONVERT_EXPR instead.  */
1854 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1855 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1856 TREE_OPERAND (*expr_p, 0));
1858 /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
1859 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1860 TREE_SET_CODE (*expr_p, NOP_EXPR);
1862 return GS_OK;
1865 /* Nonlocal VLAs seen in the current function.  */
1866 static hash_set<tree> *nonlocal_vlas;
1868 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
1869 static tree nonlocal_vla_vars;
1871 /* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
1872 DECL_VALUE_EXPR, and it's worth re-examining things.  */
1874 static enum gimplify_status
1875 gimplify_var_or_parm_decl (tree *expr_p)
1877 tree decl = *expr_p;
1879 /* ??? If this is a local variable, and it has not been seen in any
1880 outer BIND_EXPR, then it's probably the result of a duplicate
1881 declaration, for which we've already issued an error.  It would
1882 be really nice if the front end wouldn't leak these at all.
1883 Currently the only known culprit is C++ destructors, as seen
1884 in g++.old-deja/g++.jason/binding.C.  */
1885 if (TREE_CODE (decl) == VAR_DECL
1886 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1887 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1888 && decl_function_context (decl) == current_function_decl)
1890 gcc_assert (seen_error ());
1891 return GS_ERROR;
1894 /* When within an OMP context, notice uses of variables.  */
1895 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1896 return GS_ALL_DONE;
1898 /* If the decl is an alias for another expression, substitute it now.  */
1899 if (DECL_HAS_VALUE_EXPR_P (decl))
1901 tree value_expr = DECL_VALUE_EXPR (decl);
1903 /* For referenced nonlocal VLAs add a decl for debugging purposes
1904 to the current function.  */
1905 if (TREE_CODE (decl) == VAR_DECL
1906 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1907 && nonlocal_vlas != NULL
1908 && TREE_CODE (value_expr) == INDIRECT_REF
1909 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1910 && decl_function_context (decl) != current_function_decl)
1912 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
/* Skip contexts that don't form a separate function body; only add
   the debug copy when not inside an outlined OMP region.  */
1913 while (ctx
1914 && (ctx->region_type == ORT_WORKSHARE
1915 || ctx->region_type == ORT_SIMD
1916 || ctx->region_type == ORT_ACC))
1917 ctx = ctx->outer_context;
/* hash_set::add returns false the first time DECL is inserted, so
   each nonlocal VLA gets exactly one debug copy.  */
1918 if (!ctx && !nonlocal_vlas->add (decl))
1920 tree copy = copy_node (decl);
1922 lang_hooks.dup_lang_specific_decl (copy);
1923 SET_DECL_RTL (copy, 0);
1924 TREE_USED (copy) = 1;
/* Chain the copy onto the list emitted for debug info.  */
1925 DECL_CHAIN (copy) = nonlocal_vla_vars;
1926 nonlocal_vla_vars = copy;
1927 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1928 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1932 *expr_p = unshare_expr (value_expr);
1933 return GS_OK;
1936 return GS_ALL_DONE;
1939 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */
1941 static void
1942 recalculate_side_effects (tree t)
1944 enum tree_code code = TREE_CODE (t);
1945 int len = TREE_OPERAND_LENGTH (t);
1946 int i;
1948 switch (TREE_CODE_CLASS (code))
1950 case tcc_expression:
1951 switch (code)
1953 case INIT_EXPR:
1954 case MODIFY_EXPR:
1955 case VA_ARG_EXPR:
1956 case PREDECREMENT_EXPR:
1957 case PREINCREMENT_EXPR:
1958 case POSTDECREMENT_EXPR:
1959 case POSTINCREMENT_EXPR:
1960 /* All of these have side-effects, no matter what their
1961 operands are.  */
1962 return;
1964 default:
1965 break;
1967 /* Fall through.  */
1969 case tcc_comparison:  /* a comparison expression */
1970 case tcc_unary:       /* a unary arithmetic expression */
1971 case tcc_binary:      /* a binary arithmetic expression */
1972 case tcc_reference:   /* a reference */
1973 case tcc_vl_exp:        /* a function call */
/* Recompute from scratch: volatile access itself is a side effect,
   then OR in the flag from every operand.  */
1974 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1975 for (i = 0; i < len; ++i)
1977 tree op = TREE_OPERAND (t, i);
1978 if (op && TREE_SIDE_EFFECTS (op))
1979 TREE_SIDE_EFFECTS (t) = 1;
1981 break;
1983 case tcc_constant:
1984 /* No side-effects.  */
1985 return;
1987 default:
1988 gcc_unreachable ();
1992 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1993 node *EXPR_P.
1995 compound_lval
1996 : min_lval '[' val ']'
1997 | min_lval '.' ID
1998 | compound_lval '[' val ']'
1999 | compound_lval '.' ID
2001 This is not part of the original SIMPLE definition, which separates
2002 array and member references, but it seems reasonable to handle them
2003 together.  Also, this way we don't run into problems with union
2004 aliasing; gcc requires that for accesses through a union to alias, the
2005 union reference must be explicit, which was not always the case when we
2006 were splitting up array and member refs.
2008 PRE_P points to the sequence where side effects that must happen before
2009 *EXPR_P should be stored.
2011 POST_P points to the sequence where side effects that must happen after
2012 *EXPR_P should be stored.  */
2014 static enum gimplify_status
2015 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2016 fallback_t fallback)
2018 tree *p;
2019 enum gimplify_status ret = GS_ALL_DONE, tret;
2020 int i;
2021 location_t loc = EXPR_LOCATION (*expr_p);
2022 tree expr = *expr_p;
2024 /* Create a stack of the subexpressions so later we can walk them in
2025 order from inner to outer.  */
2026 auto_vec<tree, 10> expr_stack;
2028 /* We can handle anything that get_inner_reference can deal with.  */
2029 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2031 restart:
2032 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
2033 if (TREE_CODE (*p) == INDIRECT_REF)
2034 *p = fold_indirect_ref_loc (loc, *p);
2036 if (handled_component_p (*p))
2038 /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
2039 additional COMPONENT_REFs.  */
2040 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2041 && gimplify_var_or_parm_decl (p) == GS_OK)
2042 goto restart;
2043 else
2044 break;
2046 expr_stack.safe_push (*p);
2049 gcc_assert (expr_stack.length ());
2051 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2052 walked through and P points to the innermost expression.
2054 Java requires that we elaborated nodes in source order.  That
2055 means we must gimplify the inner expression followed by each of
2056 the indices, in order.  But we can't gimplify the inner
2057 expression until we deal with any variable bounds, sizes, or
2058 positions in order to deal with PLACEHOLDER_EXPRs.
2060 So we do this in three steps.  First we deal with the annotations
2061 for any variables in the components, then we gimplify the base,
2062 then we gimplify any indices, from left to right.  */
2063 for (i = expr_stack.length () - 1; i >= 0; i--)
2065 tree t = expr_stack[i];
2067 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2069 /* Gimplify the low bound and element type size and put them into
2070 the ARRAY_REF.  If these values are set, they have already been
2071 gimplified.  */
2072 if (TREE_OPERAND (t, 2) == NULL_TREE)
2074 tree low = unshare_expr (array_ref_low_bound (t));
/* Only store a non-constant bound in operand 2; constants are
   recomputed on demand by array_ref_low_bound.  */
2075 if (!is_gimple_min_invariant (low))
2077 TREE_OPERAND (t, 2) = low;
2078 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2079 post_p, is_gimple_reg,
2080 fb_rvalue);
2081 ret = MIN (ret, tret);
2084 else
2086 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2087 is_gimple_reg, fb_rvalue);
2088 ret = MIN (ret, tret);
2091 if (TREE_OPERAND (t, 3) == NULL_TREE)
2093 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2094 tree elmt_size = unshare_expr (array_ref_element_size (t));
2095 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2097 /* Divide the element size by the alignment of the element
2098 type (above).  */
2099 elmt_size
2100 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2102 if (!is_gimple_min_invariant (elmt_size))
2104 TREE_OPERAND (t, 3) = elmt_size;
2105 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2106 post_p, is_gimple_reg,
2107 fb_rvalue);
2108 ret = MIN (ret, tret);
2111 else
2113 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2114 is_gimple_reg, fb_rvalue);
2115 ret = MIN (ret, tret);
2118 else if (TREE_CODE (t) == COMPONENT_REF)
2120 /* Set the field offset into T and gimplify it.  */
2121 if (TREE_OPERAND (t, 2) == NULL_TREE)
2123 tree offset = unshare_expr (component_ref_field_offset (t));
2124 tree field = TREE_OPERAND (t, 1);
2125 tree factor
2126 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2128 /* Divide the offset by its alignment.  */
2129 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2131 if (!is_gimple_min_invariant (offset))
2133 TREE_OPERAND (t, 2) = offset;
2134 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2135 post_p, is_gimple_reg,
2136 fb_rvalue);
2137 ret = MIN (ret, tret);
2140 else
2142 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2143 is_gimple_reg, fb_rvalue);
2144 ret = MIN (ret, tret);
2149 /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
2150 so as to match the min_lval predicate.  Failure to do so may result
2151 in the creation of large aggregate temporaries.  */
2152 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2153 fallback | fb_lvalue);
2154 ret = MIN (ret, tret);
2156 /* And finally, the indices and operands of ARRAY_REF.  During this
2157 loop we also remove any useless conversions.  */
2158 for (; expr_stack.length () > 0; )
2160 tree t = expr_stack.pop ();
2162 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2164 /* Gimplify the dimension.  */
2165 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2167 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2168 is_gimple_val, fb_rvalue);
2169 ret = MIN (ret, tret);
2173 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2175 /* The innermost expression P may have originally had
2176 TREE_SIDE_EFFECTS set which would have caused all the outer
2177 expressions in *EXPR_P leading to P to also have had
2178 TREE_SIDE_EFFECTS set.  */
2179 recalculate_side_effects (t);
2182 /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
2183 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2185 canonicalize_component_ref (expr_p);
2188 expr_stack.release ();
/* If nothing changed, *expr_p must be untouched; otherwise some
   sub-gimplification reported progress.  */
2190 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2192 return ret;
2195 /* Gimplify the self modifying expression pointed to by EXPR_P
2196 (++, --, +=, -=).
2198 PRE_P points to the list where side effects that must happen before
2199 *EXPR_P should be stored.
2201 POST_P points to the list where side effects that must happen after
2202 *EXPR_P should be stored.
2204 WANT_VALUE is nonzero iff we want to use the value of this expression
2205 in another expression.
2207 ARITH_TYPE is the type the computation should be performed in. */
2209 enum gimplify_status
2210 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2211 bool want_value, tree arith_type)
2213 enum tree_code code;
2214 tree lhs, lvalue, rhs, t1;
/* Save the caller's post queue; for postfix operators this expression's
   inner post side effects are collected in the local POST first.  */
2215 gimple_seq post = NULL, *orig_post_p = post_p;
2216 bool postfix;
2217 enum tree_code arith_code;
2218 enum gimplify_status ret;
2219 location_t loc = EXPR_LOCATION (*expr_p);
2221 code = TREE_CODE (*expr_p);
2223 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2224 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2226 /* Prefix or postfix? */
2227 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2228 /* Faster to treat as prefix if result is not used. */
2229 postfix = want_value;
2230 else
2231 postfix = false;
2233 /* For postfix, make sure the inner expression's post side effects
2234 are executed after side effects from this expression. */
2235 if (postfix)
2236 post_p = &post;
2238 /* Add or subtract? */
2239 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2240 arith_code = PLUS_EXPR;
2241 else
2242 arith_code = MINUS_EXPR;
2244 /* Gimplify the LHS into a GIMPLE lvalue. */
2245 lvalue = TREE_OPERAND (*expr_p, 0);
2246 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2247 if (ret == GS_ERROR)
2248 return ret;
2250 /* Extract the operands to the arithmetic operation. */
2251 lhs = lvalue;
2252 rhs = TREE_OPERAND (*expr_p, 1);
2254 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2255 that as the result value and in the postqueue operation. */
2256 if (postfix)
2258 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2259 if (ret == GS_ERROR)
2260 return ret;
/* Snapshot the pre-modification value into a temporary; that temporary
   becomes the value of the whole postfix expression.  */
2262 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2265 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2266 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2268 rhs = convert_to_ptrofftype_loc (loc, rhs);
/* There is no POINTER_MINUS_EXPR; decrement is expressed by negating
   the offset and using POINTER_PLUS_EXPR.  */
2269 if (arith_code == MINUS_EXPR)
2270 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2271 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2273 else
2274 t1 = fold_convert (TREE_TYPE (*expr_p),
2275 fold_build2 (arith_code, arith_type,
2276 fold_convert (arith_type, lhs),
2277 fold_convert (arith_type, rhs)));
2279 if (postfix)
2281 gimplify_assign (lvalue, t1, pre_p);
/* Now append the locally collected post side effects to the caller's
   post queue, after this expression's own assignment.  */
2282 gimplify_seq_add_seq (orig_post_p, post)
2283 *expr_p = lhs;
2284 return GS_ALL_DONE;
2286 else
2288 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2289 return GS_OK;
2293 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2295 static void
2296 maybe_with_size_expr (tree *expr_p)
2298 tree expr = *expr_p;
2299 tree type = TREE_TYPE (expr);
2300 tree size;
2302 /* If we've already wrapped this or the type is error_mark_node, we can't do
2303 anything. */
2304 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2305 || type == error_mark_node)
2306 return;
2308 /* If the size isn't known or is a constant, we have nothing to do. */
2309 size = TYPE_SIZE_UNIT (type);
2310 if (!size || TREE_CODE (size) == INTEGER_CST)
2311 return;
2313 /* Otherwise, make a WITH_SIZE_EXPR. */
2314 size = unshare_expr (size);
2315 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2316 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2319 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2320 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2321 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
2322 gimplified to an SSA name. */
2324 enum gimplify_status
2325 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
2326 bool allow_ssa)
2328 bool (*test) (tree);
2329 fallback_t fb;
2331 /* In general, we allow lvalues for function arguments to avoid
2332 extra overhead of copying large aggregates out of even larger
2333 aggregates into temporaries only to copy the temporaries to
2334 the argument list. Make optimizers happy by pulling out to
2335 temporaries those types that fit in registers. */
2336 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2337 test = is_gimple_val, fb = fb_rvalue;
2338 else
2340 test = is_gimple_lvalue, fb = fb_either;
2341 /* Also strip a TARGET_EXPR that would force an extra copy. */
2342 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2344 tree init = TARGET_EXPR_INITIAL (*arg_p);
/* Only replace by the initializer when it yields a value; a void
   initializer has no result to pass as the argument.  */
2345 if (init
2346 && !VOID_TYPE_P (TREE_TYPE (init)))
2347 *arg_p = init;
2351 /* If this is a variable sized type, we must remember the size. */
2352 maybe_with_size_expr (arg_p);
2354 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2355 /* Make sure arguments have the same location as the function call
2356 itself. */
2357 protected_set_expr_location (*arg_p, call_location);
2359 /* There is a sequence point before a function call. Side effects in
2360 the argument list must occur before the actual call. So, when
2361 gimplifying arguments, force gimplify_expr to use an internal
2362 post queue which is then appended to the end of PRE_P. */
2363 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
2366 /* Don't fold inside offloading or taskreg regions: it can break code by
2367 adding decl references that weren't in the source. We'll do it during
2368 omplower pass instead. */
2370 static bool
2371 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2373 struct gimplify_omp_ctx *ctx;
2374 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2375 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2376 return false;
2377 return fold_stmt (gsi);
2380 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2381 WANT_VALUE is true if the result of the call is desired. */
2383 static enum gimplify_status
2384 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2386 tree fndecl, parms, p, fnptrtype;
2387 enum gimplify_status ret;
2388 int i, nargs;
2389 gcall *call;
2390 bool builtin_va_start_p = false;
2391 location_t loc = EXPR_LOCATION (*expr_p);
2393 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2395 /* For reliable diagnostics during inlining, it is necessary that
2396 every call_expr be annotated with file and line. */
2397 if (! EXPR_HAS_LOCATION (*expr_p))
2398 SET_EXPR_LOCATION (*expr_p, input_location);
2400 /* Gimplify internal functions created in the FEs. */
2401 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
/* Internal-fn calls are identified by a null CALL_EXPR_FN; they are
   emitted directly as a GIMPLE internal call below.  */
2403 if (want_value)
2404 return GS_ALL_DONE;
2406 nargs = call_expr_nargs (*expr_p);
2407 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2408 auto_vec<tree> vargs (nargs);
2410 for (i = 0; i < nargs; i++)
2412 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2413 EXPR_LOCATION (*expr_p));
2414 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2416 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
2417 gimplify_seq_add_stmt (pre_p, call);
2418 return GS_ALL_DONE;
2421 /* This may be a call to a builtin function.
2423 Builtin function calls may be transformed into different
2424 (and more efficient) builtin function calls under certain
2425 circumstances. Unfortunately, gimplification can muck things
2426 up enough that the builtin expanders are not aware that certain
2427 transformations are still valid.
2429 So we attempt transformation/gimplification of the call before
2430 we gimplify the CALL_EXPR. At this time we do not manage to
2431 transform all calls in the same manner as the expanders do, but
2432 we do transform most of them. */
2433 fndecl = get_callee_fndecl (*expr_p);
2434 if (fndecl
2435 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2436 switch (DECL_FUNCTION_CODE (fndecl))
2438 case BUILT_IN_ALLOCA:
2439 case BUILT_IN_ALLOCA_WITH_ALIGN:
2440 /* If the call has been built for a variable-sized object, then we
2441 want to restore the stack level when the enclosing BIND_EXPR is
2442 exited to reclaim the allocated space; otherwise, we precisely
2443 need to do the opposite and preserve the latest stack level. */
2444 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
2445 gimplify_ctxp->save_stack = true;
2446 else
2447 gimplify_ctxp->keep_stack = true;
2448 break;
2450 case BUILT_IN_VA_START:
2452 builtin_va_start_p = TRUE;
2453 if (call_expr_nargs (*expr_p) < 2)
/* Diagnose the malformed call and replace it by an empty statement
   so gimplification can continue.  */
2455 error ("too few arguments to function %<va_start%>");
2456 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2457 return GS_OK;
2460 if (fold_builtin_next_arg (*expr_p, true))
2462 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2463 return GS_OK;
2465 break;
2468 default:
2471 if (fndecl && DECL_BUILT_IN (fndecl))
2473 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2474 if (new_tree && new_tree != *expr_p)
2476 /* There was a transformation of this call which computes the
2477 same value, but in a more efficient way. Return and try
2478 again. */
2479 *expr_p = new_tree;
2480 return GS_OK;
2484 /* Remember the original function pointer type. */
2485 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2487 /* There is a sequence point before the call, so any side effects in
2488 the calling expression must occur before the actual call. Force
2489 gimplify_expr to use an internal post queue. */
2490 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2491 is_gimple_call_addr, fb_rvalue);
2493 nargs = call_expr_nargs (*expr_p);
2495 /* Get argument types for verification. */
2496 fndecl = get_callee_fndecl (*expr_p);
2497 parms = NULL_TREE;
2498 if (fndecl)
2499 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2500 else
2501 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2503 if (fndecl && DECL_ARGUMENTS (fndecl))
2504 p = DECL_ARGUMENTS (fndecl);
2505 else if (parms)
2506 p = parms;
2507 else
2508 p = NULL_TREE;
/* Advance P past the named parameters; afterwards P is null iff all
   arguments were consumed by named parameters.  */
2509 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2512 /* If the last argument is __builtin_va_arg_pack () and it is not
2513 passed as a named argument, decrease the number of CALL_EXPR
2514 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2515 if (!p
2516 && i < nargs
2517 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2519 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2520 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2522 if (last_arg_fndecl
2523 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2524 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2525 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2527 tree call = *expr_p;
2529 --nargs;
2530 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2531 CALL_EXPR_FN (call),
2532 nargs, CALL_EXPR_ARGP (call));
2534 /* Copy all CALL_EXPR flags, location and block, except
2535 CALL_EXPR_VA_ARG_PACK flag. */
2536 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2537 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2538 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2539 = CALL_EXPR_RETURN_SLOT_OPT (call);
2540 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2541 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2543 /* Set CALL_EXPR_VA_ARG_PACK. */
2544 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2548 /* If the call returns twice then after building the CFG the call
2549 argument computations will no longer dominate the call because
2550 we add an abnormal incoming edge to the call. So do not use SSA
2551 vars there. */
2552 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
2554 /* Gimplify the function arguments. */
2555 if (nargs > 0)
2557 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2558 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2559 PUSH_ARGS_REVERSED ? i-- : i++)
2561 enum gimplify_status t;
2563 /* Avoid gimplifying the second argument to va_start, which needs to
2564 be the plain PARM_DECL. */
2565 if ((i != 1) || !builtin_va_start_p)
2567 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2568 EXPR_LOCATION (*expr_p), ! returns_twice);
/* Record failure but keep gimplifying the remaining arguments.  */
2570 if (t == GS_ERROR)
2571 ret = GS_ERROR;
2576 /* Gimplify the static chain. */
2577 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2579 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2580 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2581 else
2583 enum gimplify_status t;
2584 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2585 EXPR_LOCATION (*expr_p), ! returns_twice);
2586 if (t == GS_ERROR)
2587 ret = GS_ERROR;
2591 /* Verify the function result. */
2592 if (want_value && fndecl
2593 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2595 error_at (loc, "using result of function returning %<void%>");
2596 ret = GS_ERROR;
2599 /* Try this again in case gimplification exposed something. */
2600 if (ret != GS_ERROR)
2602 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2604 if (new_tree && new_tree != *expr_p)
2606 /* There was a transformation of this call which computes the
2607 same value, but in a more efficient way. Return and try
2608 again. */
2609 *expr_p = new_tree;
2610 return GS_OK;
2613 else
2615 *expr_p = error_mark_node;
2616 return GS_ERROR;
2619 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2620 decl. This allows us to eliminate redundant or useless
2621 calls to "const" functions. */
2622 if (TREE_CODE (*expr_p) == CALL_EXPR)
2624 int flags = call_expr_flags (*expr_p);
2625 if (flags & (ECF_CONST | ECF_PURE)
2626 /* An infinite loop is considered a side effect. */
2627 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2628 TREE_SIDE_EFFECTS (*expr_p) = 0;
2631 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2632 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2633 form and delegate the creation of a GIMPLE_CALL to
2634 gimplify_modify_expr. This is always possible because when
2635 WANT_VALUE is true, the caller wants the result of this call into
2636 a temporary, which means that we will emit an INIT_EXPR in
2637 internal_get_tmp_var which will then be handled by
2638 gimplify_modify_expr. */
2639 if (!want_value)
2641 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2642 have to do is replicate it as a GIMPLE_CALL tuple. */
2643 gimple_stmt_iterator gsi;
2644 call = gimple_build_call_from_tree (*expr_p);
2645 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2646 notice_special_calls (call);
2647 gimplify_seq_add_stmt (pre_p, call);
2648 gsi = gsi_last (*pre_p);
2649 maybe_fold_stmt (&gsi);
2650 *expr_p = NULL_TREE;
2652 else
2653 /* Remember the original function type. */
2654 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2655 CALL_EXPR_FN (*expr_p));
2657 return ret;
2660 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2661 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2663 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2664 condition is true or false, respectively. If null, we should generate
2665 our own to skip over the evaluation of this specific expression.
2667 LOCUS is the source location of the COND_EXPR.
2669 This function is the tree equivalent of do_jump.
2671 shortcut_cond_r should only be called by shortcut_cond_expr. */
2673 static tree
2674 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2675 location_t locus)
2677 tree local_label = NULL_TREE;
2678 tree t, expr = NULL;
2680 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2681 retain the shortcut semantics. Just insert the gotos here;
2682 shortcut_cond_expr will append the real blocks later. */
2683 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2685 location_t new_locus;
2687 /* Turn if (a && b) into
2689 if (a); else goto no;
2690 if (b) goto yes; else goto no;
2691 (no:) */
2693 if (false_label_p == NULL)
2694 false_label_p = &local_label;
2696 /* Keep the original source location on the first 'if'. */
2697 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2698 append_to_statement_list (t, &expr);
2700 /* Set the source location of the && on the second 'if'. */
2701 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2702 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2703 new_locus);
2704 append_to_statement_list (t, &expr);
2706 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2708 location_t new_locus;
2710 /* Turn if (a || b) into
2712 if (a) goto yes;
2713 if (b) goto yes; else goto no;
2714 (yes:) */
2716 if (true_label_p == NULL)
2717 true_label_p = &local_label;
2719 /* Keep the original source location on the first 'if'. */
2720 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2721 append_to_statement_list (t, &expr);
2723 /* Set the source location of the || on the second 'if'. */
2724 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2725 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2726 new_locus);
2727 append_to_statement_list (t, &expr);
2729 else if (TREE_CODE (pred) == COND_EXPR
2730 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2731 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2733 location_t new_locus;
2735 /* As long as we're messing with gotos, turn if (a ? b : c) into
2736 if (a)
2737 if (b) goto yes; else goto no;
2738 else
2739 if (c) goto yes; else goto no;
2741 Don't do this if one of the arms has void type, which can happen
2742 in C++ when the arm is throw. */
2744 /* Keep the original source location on the first 'if'. Set the source
2745 location of the ? on the second 'if'. */
2746 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2747 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2748 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2749 false_label_p, locus),
2750 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2751 false_label_p, new_locus));
2753 else
/* Base case: PRED is a simple condition; emit a COND_EXPR whose arms
   jump to the true/false labels (build_and_jump creates a label via
   the label pointer when needed).  */
2755 expr = build3 (COND_EXPR, void_type_node, pred,
2756 build_and_jump (true_label_p),
2757 build_and_jump (false_label_p));
2758 SET_EXPR_LOCATION (expr, locus);
/* If a label was created locally above (the "fall through" target of
   this subexpression), emit it at the end so control joins here.  */
2761 if (local_label)
2763 t = build1 (LABEL_EXPR, void_type_node, local_label);
2764 append_to_statement_list (t, &expr);
2767 return expr;
2770 /* Given a conditional expression EXPR with short-circuit boolean
2771 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2772 predicate apart into the equivalent sequence of conditionals. */
2774 static tree
2775 shortcut_cond_expr (tree expr)
2777 tree pred = TREE_OPERAND (expr, 0);
2778 tree then_ = TREE_OPERAND (expr, 1);
2779 tree else_ = TREE_OPERAND (expr, 2);
2780 tree true_label, false_label, end_label, t;
2781 tree *true_label_p;
2782 tree *false_label_p;
2783 bool emit_end, emit_false, jump_over_else;
/* *_se track whether each arm contains interesting (side-effecting)
   code; empty arms can be dropped or have their labels forwarded.  */
2784 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2785 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2787 /* First do simple transformations. */
2788 if (!else_se)
2790 /* If there is no 'else', turn
2791 if (a && b) then c
2792 into
2793 if (a) if (b) then c. */
2794 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2796 /* Keep the original source location on the first 'if'. */
2797 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2798 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2799 /* Set the source location of the && on the second 'if'. */
2800 if (EXPR_HAS_LOCATION (pred))
2801 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2802 then_ = shortcut_cond_expr (expr);
2803 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2804 pred = TREE_OPERAND (pred, 0);
2805 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2806 SET_EXPR_LOCATION (expr, locus);
2810 if (!then_se)
2812 /* If there is no 'then', turn
2813 if (a || b); else d
2814 into
2815 if (a); else if (b); else d. */
2816 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2818 /* Keep the original source location on the first 'if'. */
2819 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2820 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2821 /* Set the source location of the || on the second 'if'. */
2822 if (EXPR_HAS_LOCATION (pred))
2823 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2824 else_ = shortcut_cond_expr (expr);
2825 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2826 pred = TREE_OPERAND (pred, 0);
2827 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2828 SET_EXPR_LOCATION (expr, locus);
2832 /* If we're done, great. */
2833 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2834 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2835 return expr;
2837 /* Otherwise we need to mess with gotos. Change
2838 if (a) c; else d;
2840 if (a); else goto no;
2841 c; goto end;
2842 no: d; end:
2843 and recursively gimplify the condition. */
2845 true_label = false_label = end_label = NULL_TREE;
2847 /* If our arms just jump somewhere, hijack those labels so we don't
2848 generate jumps to jumps. */
2850 if (then_
2851 && TREE_CODE (then_) == GOTO_EXPR
2852 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2854 true_label = GOTO_DESTINATION (then_);
2855 then_ = NULL;
2856 then_se = false;
2859 if (else_
2860 && TREE_CODE (else_) == GOTO_EXPR
2861 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2863 false_label = GOTO_DESTINATION (else_);
2864 else_ = NULL;
2865 else_se = false;
2868 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2869 if (true_label)
2870 true_label_p = &true_label;
2871 else
2872 true_label_p = NULL;
2874 /* The 'else' branch also needs a label if it contains interesting code. */
2875 if (false_label || else_se)
2876 false_label_p = &false_label;
2877 else
2878 false_label_p = NULL;
2880 /* If there was nothing else in our arms, just forward the label(s). */
2881 if (!then_se && !else_se)
2882 return shortcut_cond_r (pred, true_label_p, false_label_p,
2883 EXPR_LOC_OR_LOC (expr, input_location));
2885 /* If our last subexpression already has a terminal label, reuse it. */
2886 if (else_se)
2887 t = expr_last (else_);
2888 else if (then_se)
2889 t = expr_last (then_);
2890 else
2891 t = NULL;
2892 if (t && TREE_CODE (t) == LABEL_EXPR)
2893 end_label = LABEL_EXPR_LABEL (t);
2895 /* If we don't care about jumping to the 'else' branch, jump to the end
2896 if the condition is false. */
2897 if (!false_label_p)
2898 false_label_p = &end_label;
2900 /* We only want to emit these labels if we aren't hijacking them. */
2901 emit_end = (end_label == NULL_TREE);
2902 emit_false = (false_label == NULL_TREE);
2904 /* We only emit the jump over the else clause if we have to--if the
2905 then clause may fall through. Otherwise we can wind up with a
2906 useless jump and a useless label at the end of gimplified code,
2907 which will cause us to think that this conditional as a whole
2908 falls through even if it doesn't. If we then inline a function
2909 which ends with such a condition, that can cause us to issue an
2910 inappropriate warning about control reaching the end of a
2911 non-void function. */
2912 jump_over_else = block_may_fallthru (then_);
2914 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2915 EXPR_LOC_OR_LOC (expr, input_location));
/* Assemble the lowered form: condition jumps, then-arm, optional jump
   over the else, false label, else-arm, and the end label.  */
2917 expr = NULL;
2918 append_to_statement_list (pred, &expr);
2920 append_to_statement_list (then_, &expr);
2921 if (else_se)
2923 if (jump_over_else)
2925 tree last = expr_last (expr);
2926 t = build_and_jump (&end_label);
2927 if (EXPR_HAS_LOCATION (last))
2928 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2929 append_to_statement_list (t, &expr);
2931 if (emit_false)
2933 t = build1 (LABEL_EXPR, void_type_node, false_label);
2934 append_to_statement_list (t, &expr);
2936 append_to_statement_list (else_, &expr);
2938 if (emit_end && end_label)
2940 t = build1 (LABEL_EXPR, void_type_node, end_label);
2941 append_to_statement_list (t, &expr);
2944 return expr;
2947 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2949 tree
2950 gimple_boolify (tree expr)
2952 tree type = TREE_TYPE (expr);
2953 location_t loc = EXPR_LOCATION (expr);
/* Recognize the __builtin_expect (x, y) != 0 shape produced for
   branch prediction hints so the hint argument can be boolified.  */
2955 if (TREE_CODE (expr) == NE_EXPR
2956 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2957 && integer_zerop (TREE_OPERAND (expr, 1)))
2959 tree call = TREE_OPERAND (expr, 0);
2960 tree fn = get_callee_fndecl (call);
2962 /* For __builtin_expect ((long) (x), y) recurse into x as well
2963 if x is truth_value_p. */
2964 if (fn
2965 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2966 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2967 && call_expr_nargs (call) == 2)
2969 tree arg = CALL_EXPR_ARG (call, 0);
2970 if (arg)
/* Strip a widening NOP_EXPR so the underlying truth value is seen.  */
2972 if (TREE_CODE (arg) == NOP_EXPR
2973 && TREE_TYPE (arg) == TREE_TYPE (call))
2974 arg = TREE_OPERAND (arg, 0);
2975 if (truth_value_p (TREE_CODE (arg)))
2977 arg = gimple_boolify (arg);
2978 CALL_EXPR_ARG (call, 0)
2979 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2985 switch (TREE_CODE (expr))
2987 case TRUTH_AND_EXPR:
2988 case TRUTH_OR_EXPR:
2989 case TRUTH_XOR_EXPR:
2990 case TRUTH_ANDIF_EXPR:
2991 case TRUTH_ORIF_EXPR:
2992 /* Also boolify the arguments of truth exprs. */
2993 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2994 /* FALLTHRU */
2996 case TRUTH_NOT_EXPR:
2997 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2999 /* These expressions always produce boolean results. */
3000 if (TREE_CODE (type) != BOOLEAN_TYPE)
3001 TREE_TYPE (expr) = boolean_type_node;
3002 return expr;
3004 case ANNOTATE_EXPR:
3005 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3007 case annot_expr_ivdep_kind:
3008 case annot_expr_no_vector_kind:
3009 case annot_expr_vector_kind:
/* Loop annotations wrap the condition; boolify the wrapped operand
   and give the annotation itself boolean type.  */
3010 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3011 if (TREE_CODE (type) != BOOLEAN_TYPE)
3012 TREE_TYPE (expr) = boolean_type_node;
3013 return expr;
3014 default:
3015 gcc_unreachable ();
3018 default:
3019 if (COMPARISON_CLASS_P (expr))
3021 /* These expressions always produce boolean results. */
3022 if (TREE_CODE (type) != BOOLEAN_TYPE)
3023 TREE_TYPE (expr) = boolean_type_node;
3024 return expr;
3026 /* Other expressions that get here must have boolean values, but
3027 might need to be converted to the appropriate mode. */
3028 if (TREE_CODE (type) == BOOLEAN_TYPE)
3029 return expr;
3030 return fold_convert_loc (loc, boolean_type_node, expr);
3034 /* Given a conditional expression *EXPR_P without side effects, gimplify
3035 its operands. New statements are inserted to PRE_P. */
3037 static enum gimplify_status
3038 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3040 tree expr = *expr_p, cond;
3041 enum gimplify_status ret, tret;
3042 enum tree_code code;
3044 cond = gimple_boolify (COND_EXPR_COND (expr));
3046 /* We need to handle && and || specially, as their gimplification
3047 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3048 code = TREE_CODE (cond);
3049 if (code == TRUTH_ANDIF_EXPR)
3050 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3051 else if (code == TRUTH_ORIF_EXPR)
3052 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3053 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3054 COND_EXPR_COND (*expr_p) = cond;
3056 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3057 is_gimple_val, fb_rvalue);
3058 ret = MIN (ret, tret);
3059 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3060 is_gimple_val, fb_rvalue);
3062 return MIN (ret, tret);
3065 /* Return true if evaluating EXPR could trap.
3066 EXPR is GENERIC, while tree_could_trap_p can be called
3067 only on GIMPLE. */
3069 static bool
3070 generic_expr_could_trap_p (tree expr)
3072 unsigned i, n;
3074 if (!expr || is_gimple_val (expr))
3075 return false;
3077 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3078 return true;
3080 n = TREE_OPERAND_LENGTH (expr);
3081 for (i = 0; i < n; i++)
3082 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3083 return true;
3085 return false;
3088 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3089 into
3091 if (p) if (p)
3092 t1 = a; a;
3093 else or else
3094 t1 = b; b;
3097 The second form is used when *EXPR_P is of type void.
3099 PRE_P points to the list where side effects that must happen before
3100 *EXPR_P should be stored. */
static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  /* The temporary holds an address; the result is what it points
	     to, so that the caller sees an lvalue.  */
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is already a bare GOTO_EXPR to a local label, reuse its
     destination instead of creating a new artificial label.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  /* Walk down through component references to reach the base object.  */
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);
  if (is_gimple_reg (*expr_p))
    {
      /* Do not allow an SSA name as the temporary.  */
      tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
      DECL_GIMPLE_REG_P (var) = 0;
      *expr_p = var;
    }
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* Hand back a dereference of the memcpy result so the caller
	 still sees the destination object as the expression value.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* The expression value is the (now-cleared) destination object.  */
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check each pointer argument type for a possible conflict with
	 the lhs alias set.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3651 /* Return true if FDECL is accessing a field that is zero sized. */
3653 static bool
3654 zero_sized_field_decl (const_tree fdecl)
3656 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3657 && integer_zerop (DECL_SIZE (fdecl)))
3658 return true;
3659 return false;
3662 /* Return true if TYPE is zero sized. */
3664 static bool
3665 zero_sized_type (const_tree type)
3667 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3668 && integer_zerop (TYPE_SIZE (type)))
3669 return true;
3670 return false;
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was already zeroed, zero initializers are
	 redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested aggregate constructors; vectors are handled
	 as a single value.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3762 /* Return the appropriate RHS predicate for this LHS. */
3764 gimple_predicate
3765 rhs_predicate_for (tree lhs)
3767 if (is_gimple_reg (lhs))
3768 return is_gimple_reg_rhs_or_call;
3769 else
3770 return is_gimple_mem_rhs_or_call;
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Replace the compound literal with its initializer when the
	     literal's storage is never referenced by address.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: only duplicate the constructor the first time an
	 element actually changes, so an unchanged ctor is returned as-is.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
3871 /* A subroutine of gimplify_modify_expr. Break out elements of a
3872 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3874 Note that we still need to clear any elements that don't have explicit
3875 initializers, so if not all elements are initialized we keep the
3876 original MODIFY_EXPR, we just remove all of the constructor elements.
3878 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3879 GS_ERROR if we would have to create a temporary when gimplifying
3880 this constructor. Otherwise, return GS_OK.
3882 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3884 static enum gimplify_status
3885 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3886 bool want_value, bool notify_temp_creation)
3888 tree object, ctor, type;
3889 enum gimplify_status ret;
3890 vec<constructor_elt, va_gc> *elts;
3892 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3894 if (!notify_temp_creation)
3896 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3897 is_gimple_lvalue, fb_lvalue);
3898 if (ret == GS_ERROR)
3899 return ret;
3902 object = TREE_OPERAND (*expr_p, 0);
3903 ctor = TREE_OPERAND (*expr_p, 1) =
3904 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3905 type = TREE_TYPE (ctor);
3906 elts = CONSTRUCTOR_ELTS (ctor);
3907 ret = GS_ALL_DONE;
3909 switch (TREE_CODE (type))
3911 case RECORD_TYPE:
3912 case UNION_TYPE:
3913 case QUAL_UNION_TYPE:
3914 case ARRAY_TYPE:
3916 struct gimplify_init_ctor_preeval_data preeval_data;
3917 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3918 bool cleared, complete_p, valid_const_initializer;
3920 /* Aggregate types must lower constructors to initialization of
3921 individual elements. The exception is that a CONSTRUCTOR node
3922 with no elements indicates zero-initialization of the whole. */
3923 if (vec_safe_is_empty (elts))
3925 if (notify_temp_creation)
3926 return GS_OK;
3927 break;
3930 /* Fetch information about the constructor to direct later processing.
3931 We might want to make static versions of it in various cases, and
3932 can only do so if it known to be a valid constant initializer. */
3933 valid_const_initializer
3934 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3935 &num_ctor_elements, &complete_p);
3937 /* If a const aggregate variable is being initialized, then it
3938 should never be a lose to promote the variable to be static. */
3939 if (valid_const_initializer
3940 && num_nonzero_elements > 1
3941 && TREE_READONLY (object)
3942 && TREE_CODE (object) == VAR_DECL
3943 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3945 if (notify_temp_creation)
3946 return GS_ERROR;
3947 DECL_INITIAL (object) = ctor;
3948 TREE_STATIC (object) = 1;
3949 if (!DECL_NAME (object))
3950 DECL_NAME (object) = create_tmp_var_name ("C");
3951 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3953 /* ??? C++ doesn't automatically append a .<number> to the
3954 assembler name, and even when it does, it looks at FE private
3955 data structures to figure out what that number should be,
3956 which are not set for this variable. I suppose this is
3957 important for local statics for inline functions, which aren't
3958 "local" in the object file sense. So in order to get a unique
3959 TU-local symbol, we must invoke the lhd version now. */
3960 lhd_set_decl_assembler_name (object);
3962 *expr_p = NULL_TREE;
3963 break;
3966 /* If there are "lots" of initialized elements, even discounting
3967 those that are not address constants (and thus *must* be
3968 computed at runtime), then partition the constructor into
3969 constant and non-constant parts. Block copy the constant
3970 parts in, then generate code for the non-constant parts. */
3971 /* TODO. There's code in cp/typeck.c to do this. */
3973 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3974 /* store_constructor will ignore the clearing of variable-sized
3975 objects. Initializers for such objects must explicitly set
3976 every field that needs to be set. */
3977 cleared = false;
3978 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3979 /* If the constructor isn't complete, clear the whole object
3980 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3982 ??? This ought not to be needed. For any element not present
3983 in the initializer, we should simply set them to zero. Except
3984 we'd need to *find* the elements that are not present, and that
3985 requires trickery to avoid quadratic compile-time behavior in
3986 large cases or excessive memory use in small cases. */
3987 cleared = true;
3988 else if (num_ctor_elements - num_nonzero_elements
3989 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3990 && num_nonzero_elements < num_ctor_elements / 4)
3991 /* If there are "lots" of zeros, it's more efficient to clear
3992 the memory and then set the nonzero elements. */
3993 cleared = true;
3994 else
3995 cleared = false;
3997 /* If there are "lots" of initialized elements, and all of them
3998 are valid address constants, then the entire initializer can
3999 be dropped to memory, and then memcpy'd out. Don't do this
4000 for sparse arrays, though, as it's more efficient to follow
4001 the standard CONSTRUCTOR behavior of memset followed by
4002 individual element initialization. Also don't do this for small
4003 all-zero initializers (which aren't big enough to merit
4004 clearing), and don't try to make bitwise copies of
4005 TREE_ADDRESSABLE types.
4007 We cannot apply such transformation when compiling chkp static
4008 initializer because creation of initializer image in the memory
4009 will require static initialization of bounds for it. It should
4010 result in another gimplification of similar initializer and we
4011 may fall into infinite loop. */
4012 if (valid_const_initializer
4013 && !(cleared || num_nonzero_elements == 0)
4014 && !TREE_ADDRESSABLE (type)
4015 && (!current_function_decl
4016 || !lookup_attribute ("chkp ctor",
4017 DECL_ATTRIBUTES (current_function_decl))))
4019 HOST_WIDE_INT size = int_size_in_bytes (type);
4020 unsigned int align;
4022 /* ??? We can still get unbounded array types, at least
4023 from the C++ front end. This seems wrong, but attempt
4024 to work around it for now. */
4025 if (size < 0)
4027 size = int_size_in_bytes (TREE_TYPE (object));
4028 if (size >= 0)
4029 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4032 /* Find the maximum alignment we can assume for the object. */
4033 /* ??? Make use of DECL_OFFSET_ALIGN. */
4034 if (DECL_P (object))
4035 align = DECL_ALIGN (object);
4036 else
4037 align = TYPE_ALIGN (type);
4039 /* Do a block move either if the size is so small as to make
4040 each individual move a sub-unit move on average, or if it
4041 is so large as to make individual moves inefficient. */
4042 if (size > 0
4043 && num_nonzero_elements > 1
4044 && (size < num_nonzero_elements
4045 || !can_move_by_pieces (size, align)))
4047 if (notify_temp_creation)
4048 return GS_ERROR;
4050 walk_tree (&ctor, force_labels_r, NULL, NULL);
4051 ctor = tree_output_constant_def (ctor);
4052 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4053 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4054 TREE_OPERAND (*expr_p, 1) = ctor;
4056 /* This is no longer an assignment of a CONSTRUCTOR, but
4057 we still may have processing to do on the LHS. So
4058 pretend we didn't do anything here to let that happen. */
4059 return GS_UNHANDLED;
4063 /* If the target is volatile, we have non-zero elements and more than
4064 one field to assign, initialize the target from a temporary. */
4065 if (TREE_THIS_VOLATILE (object)
4066 && !TREE_ADDRESSABLE (type)
4067 && num_nonzero_elements > 0
4068 && vec_safe_length (elts) > 1)
4070 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4071 TREE_OPERAND (*expr_p, 0) = temp;
4072 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4073 *expr_p,
4074 build2 (MODIFY_EXPR, void_type_node,
4075 object, temp));
4076 return GS_OK;
4079 if (notify_temp_creation)
4080 return GS_OK;
4082 /* If there are nonzero elements and if needed, pre-evaluate to capture
4083 elements overlapping with the lhs into temporaries. We must do this
4084 before clearing to fetch the values before they are zeroed-out. */
4085 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4087 preeval_data.lhs_base_decl = get_base_address (object);
4088 if (!DECL_P (preeval_data.lhs_base_decl))
4089 preeval_data.lhs_base_decl = NULL;
4090 preeval_data.lhs_alias_set = get_alias_set (object);
4092 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4093 pre_p, post_p, &preeval_data);
4096 bool ctor_has_side_effects_p
4097 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4099 if (cleared)
4101 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4102 Note that we still have to gimplify, in order to handle the
4103 case of variable sized types. Avoid shared tree structures. */
4104 CONSTRUCTOR_ELTS (ctor) = NULL;
4105 TREE_SIDE_EFFECTS (ctor) = 0;
4106 object = unshare_expr (object);
4107 gimplify_stmt (expr_p, pre_p);
4110 /* If we have not block cleared the object, or if there are nonzero
4111 elements in the constructor, or if the constructor has side effects,
4112 add assignments to the individual scalar fields of the object. */
4113 if (!cleared
4114 || num_nonzero_elements > 0
4115 || ctor_has_side_effects_p)
4116 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4118 *expr_p = NULL_TREE;
4120 break;
4122 case COMPLEX_TYPE:
4124 tree r, i;
4126 if (notify_temp_creation)
4127 return GS_OK;
4129 /* Extract the real and imaginary parts out of the ctor. */
4130 gcc_assert (elts->length () == 2);
4131 r = (*elts)[0].value;
4132 i = (*elts)[1].value;
4133 if (r == NULL || i == NULL)
4135 tree zero = build_zero_cst (TREE_TYPE (type));
4136 if (r == NULL)
4137 r = zero;
4138 if (i == NULL)
4139 i = zero;
4142 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4143 represent creation of a complex value. */
4144 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4146 ctor = build_complex (type, r, i);
4147 TREE_OPERAND (*expr_p, 1) = ctor;
4149 else
4151 ctor = build2 (COMPLEX_EXPR, type, r, i);
4152 TREE_OPERAND (*expr_p, 1) = ctor;
4153 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4154 pre_p,
4155 post_p,
4156 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4157 fb_rvalue);
4160 break;
4162 case VECTOR_TYPE:
4164 unsigned HOST_WIDE_INT ix;
4165 constructor_elt *ce;
4167 if (notify_temp_creation)
4168 return GS_OK;
4170 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4171 if (TREE_CONSTANT (ctor))
4173 bool constant_p = true;
4174 tree value;
4176 /* Even when ctor is constant, it might contain non-*_CST
4177 elements, such as addresses or trapping values like
4178 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4179 in VECTOR_CST nodes. */
4180 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4181 if (!CONSTANT_CLASS_P (value))
4183 constant_p = false;
4184 break;
4187 if (constant_p)
4189 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4190 break;
4193 TREE_CONSTANT (ctor) = 0;
4196 /* Vector types use CONSTRUCTOR all the way through gimple
4197 compilation as a general initializer. */
4198 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4200 enum gimplify_status tret;
4201 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4202 fb_rvalue);
4203 if (tret == GS_ERROR)
4204 ret = GS_ERROR;
4205 else if (TREE_STATIC (ctor)
4206 && !initializer_constant_valid_p (ce->value,
4207 TREE_TYPE (ce->value)))
4208 TREE_STATIC (ctor) = 0;
4210 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4211 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4213 break;
4215 default:
4216 /* So how did we get a CONSTRUCTOR for a scalar type? */
4217 gcc_unreachable ();
4220 if (ret == GS_ERROR)
4221 return GS_ERROR;
4222 else if (want_value)
4224 *expr_p = object;
4225 return GS_OK;
4227 else
4229 /* If we have gimplified both sides of the initializer but have
4230 not emitted an assignment, do so now. */
4231 if (*expr_p)
4233 tree lhs = TREE_OPERAND (*expr_p, 0);
4234 tree rhs = TREE_OPERAND (*expr_p, 1);
4235 gassign *init = gimple_build_assign (lhs, rhs);
4236 gimplify_seq_add_stmt (pre_p, init);
4237 *expr_p = NULL;
4240 return GS_ALL_DONE;
4244 /* Given a pointer value OP0, return a simplified version of an
4245 indirection through OP0, or NULL_TREE if no simplification is
4246 possible. This may only be applied to a rhs of an expression.
4247 Note that the resulting type may be different from the type pointed
4248 to in the sense that it is still compatible from the langhooks
4249 point of view. */
4251 static tree
4252 gimple_fold_indirect_ref_rhs (tree t)
4254 return gimple_fold_indirect_ref (t);
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P point at
   its RHS and LHS operands.  PRE_P/POST_P collect side-effect
   statements; WANT_VALUE is true when the assignment's value is used.
   Returns GS_UNHANDLED when no simplification applied.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Keep re-dispatching on the (possibly rewritten) RHS code until an
     iteration makes no further change.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  Note the trial call passes no statement
		 sequences and notify_temp_creation = true, so it only
		 probes without emitting anything.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original indirection:
		   re-wrap a plain decl in a MEM_REF so the flag has a
		   place to live.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Turn each non-void arm into an assignment of that arm
		 into RESULT, making the whole COND_EXPR void.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;

	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* voidify_wrapper_expr pushes the assignment down to the
	       wrapper's value-producing subexpression in place.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  break;

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4541 /* Return true if T looks like a valid GIMPLE statement. */
4543 static bool
4544 is_gimple_stmt (tree t)
4546 const enum tree_code code = TREE_CODE (t);
4548 switch (code)
4550 case NOP_EXPR:
4551 /* The only valid NOP_EXPR is the empty statement. */
4552 return IS_EMPTY_STMT (t);
4554 case BIND_EXPR:
4555 case COND_EXPR:
4556 /* These are only valid if they're void. */
4557 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4559 case SWITCH_EXPR:
4560 case GOTO_EXPR:
4561 case RETURN_EXPR:
4562 case LABEL_EXPR:
4563 case CASE_LABEL_EXPR:
4564 case TRY_CATCH_EXPR:
4565 case TRY_FINALLY_EXPR:
4566 case EH_FILTER_EXPR:
4567 case CATCH_EXPR:
4568 case ASM_EXPR:
4569 case STATEMENT_LIST:
4570 case OACC_PARALLEL:
4571 case OACC_KERNELS:
4572 case OACC_DATA:
4573 case OACC_HOST_DATA:
4574 case OACC_DECLARE:
4575 case OACC_UPDATE:
4576 case OACC_ENTER_DATA:
4577 case OACC_EXIT_DATA:
4578 case OACC_CACHE:
4579 case OMP_PARALLEL:
4580 case OMP_FOR:
4581 case OMP_SIMD:
4582 case CILK_SIMD:
4583 case OMP_DISTRIBUTE:
4584 case OACC_LOOP:
4585 case OMP_SECTIONS:
4586 case OMP_SECTION:
4587 case OMP_SINGLE:
4588 case OMP_MASTER:
4589 case OMP_TASKGROUP:
4590 case OMP_ORDERED:
4591 case OMP_CRITICAL:
4592 case OMP_TASK:
4593 case OMP_TARGET:
4594 case OMP_TARGET_DATA:
4595 case OMP_TARGET_UPDATE:
4596 case OMP_TARGET_ENTER_DATA:
4597 case OMP_TARGET_EXIT_DATA:
4598 case OMP_TASKLOOP:
4599 case OMP_TEAMS:
4600 /* These are always void. */
4601 return true;
4603 case CALL_EXPR:
4604 case MODIFY_EXPR:
4605 case PREDICT_EXPR:
4606 /* These are valid regardless of their type. */
4607 return true;
4609 default:
4610 return false;
4615 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4616 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4617 DECL_GIMPLE_REG_P set.
4619 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4620 other, unmodified part of the complex object just before the total store.
4621 As a consequence, if the object is still uninitialized, an undefined value
4622 will be loaded into a register, which may result in a spurious exception
4623 if the register is floating-point and the value happens to be a signaling
4624 NaN for example. Then the fully-fledged complex operations lowering pass
4625 followed by a DCE pass are necessary in order to fix things up. */
4627 static enum gimplify_status
4628 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4629 bool want_value)
4631 enum tree_code code, ocode;
4632 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4634 lhs = TREE_OPERAND (*expr_p, 0);
4635 rhs = TREE_OPERAND (*expr_p, 1);
4636 code = TREE_CODE (lhs);
4637 lhs = TREE_OPERAND (lhs, 0);
4639 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4640 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4641 TREE_NO_WARNING (other) = 1;
4642 other = get_formal_tmp_var (other, pre_p);
4644 realpart = code == REALPART_EXPR ? rhs : other;
4645 imagpart = code == REALPART_EXPR ? other : rhs;
4647 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4648 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4649 else
4650 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4652 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4653 *expr_p = (want_value) ? rhs : NULL_TREE;
4655 return GS_ALL_DONE;
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
      *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
      in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the LHS first.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended as a
	     fourth argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* A volatile LHS must not be read back as the result value; evaluate
     the RHS into a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with three arguments is lowered to the
	     internal function form here.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  /* Emit the statement and give early folding a chance at it.  */
  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4916 /* Gimplify a comparison between two variable-sized objects. Do this
4917 with a call to BUILT_IN_MEMCMP. */
4919 static enum gimplify_status
4920 gimplify_variable_sized_compare (tree *expr_p)
4922 location_t loc = EXPR_LOCATION (*expr_p);
4923 tree op0 = TREE_OPERAND (*expr_p, 0);
4924 tree op1 = TREE_OPERAND (*expr_p, 1);
4925 tree t, arg, dest, src, expr;
4927 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4928 arg = unshare_expr (arg);
4929 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4930 src = build_fold_addr_expr_loc (loc, op1);
4931 dest = build_fold_addr_expr_loc (loc, op0);
4932 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4933 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4935 expr
4936 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4937 SET_EXPR_LOCATION (expr, loc);
4938 *expr_p = expr;
4940 return GS_OK;
4943 /* Gimplify a comparison between two aggregate objects of integral scalar
4944 mode as a comparison between the bitwise equivalent scalar values. */
4946 static enum gimplify_status
4947 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4949 location_t loc = EXPR_LOCATION (*expr_p);
4950 tree op0 = TREE_OPERAND (*expr_p, 0);
4951 tree op1 = TREE_OPERAND (*expr_p, 1);
4953 tree type = TREE_TYPE (op0);
4954 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4956 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4957 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4959 *expr_p
4960 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4962 return GS_OK;
4965 /* Gimplify an expression sequence. This function gimplifies each
4966 expression and rewrites the original expression with the last
4967 expression of the sequence in GIMPLE form.
4969 PRE_P points to the list where the side effects for all the
4970 expressions in the sequence will be emitted.
4972 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4974 static enum gimplify_status
4975 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4977 tree t = *expr_p;
4981 tree *sub_p = &TREE_OPERAND (t, 0);
4983 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4984 gimplify_compound_expr (sub_p, pre_p, false);
4985 else
4986 gimplify_stmt (sub_p, pre_p);
4988 t = TREE_OPERAND (t, 1);
4990 while (TREE_CODE (t) == COMPOUND_EXPR);
4992 *expr_p = t;
4993 if (want_value)
4994 return GS_OK;
4995 else
4997 gimplify_stmt (expr_p, pre_p);
4998 return GS_ALL_DONE;
5002 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5003 gimplify. After gimplification, EXPR_P will point to a new temporary
5004 that holds the original value of the SAVE_EXPR node.
5006 PRE_P points to the list where side effects that must happen before
5007 *EXPR_P should be stored. */
5009 static enum gimplify_status
5010 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5012 enum gimplify_status ret = GS_ALL_DONE;
5013 tree val;
5015 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5016 val = TREE_OPERAND (*expr_p, 0);
5018 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5019 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5021 /* The operand may be a void-valued expression such as SAVE_EXPRs
5022 generated by the Java frontend for class initialization. It is
5023 being executed only for its side-effects. */
5024 if (TREE_TYPE (val) == void_type_node)
5026 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5027 is_gimple_stmt, fb_none);
5028 val = NULL;
5030 else
5031 /* The temporary may not be an SSA name as later abnormal and EH
5032 control flow may invalidate use/def domination. */
5033 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5035 TREE_OPERAND (*expr_p, 0) = val;
5036 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5039 *expr_p = val;
5041 return ret;
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* A MEM_REF with a zero offset is equivalent to an INDIRECT_REF
	 for this purpose; share that handling.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* ... fall through ... */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5168 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5169 value; output operands should be a gimple lvalue. */
5171 static enum gimplify_status
5172 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5174 tree expr;
5175 int noutputs;
5176 const char **oconstraints;
5177 int i;
5178 tree link;
5179 const char *constraint;
5180 bool allows_mem, allows_reg, is_inout;
5181 enum gimplify_status ret, tret;
5182 gasm *stmt;
5183 vec<tree, va_gc> *inputs;
5184 vec<tree, va_gc> *outputs;
5185 vec<tree, va_gc> *clobbers;
5186 vec<tree, va_gc> *labels;
5187 tree link_next;
5189 expr = *expr_p;
5190 noutputs = list_length (ASM_OUTPUTS (expr));
5191 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5193 inputs = NULL;
5194 outputs = NULL;
5195 clobbers = NULL;
5196 labels = NULL;
5198 ret = GS_ALL_DONE;
5199 link_next = NULL_TREE;
/* Pass 1: outputs.  Each output operand is gimplified to an lvalue and
   pushed onto OUTPUTS; "+" (in/out) constraints are split below into a
   pure "=" output plus a matching numbered input appended to ASM_INPUTS.  */
5200 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5202 bool ok;
5203 size_t constraint_len;
5205 link_next = TREE_CHAIN (link);
5207 oconstraints[i]
5208 = constraint
5209 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5210 constraint_len = strlen (constraint);
5211 if (constraint_len == 0)
5212 continue;
5214 ok = parse_output_constraint (&constraint, i, 0, 0,
5215 &allows_mem, &allows_reg, &is_inout);
5216 if (!ok)
5218 ret = GS_ERROR;
5219 is_inout = false;
5222 if (!allows_reg && allows_mem)
5223 mark_addressable (TREE_VALUE (link));
5225 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5226 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5227 fb_lvalue | fb_mayfail);
5228 if (tret == GS_ERROR)
5230 error ("invalid lvalue in asm output %d", i);
5231 ret = tret;
5234 /* If the constraint does not allow memory make sure we gimplify
5235 it to a register if it is not already but its base is. This
5236 happens for complex and vector components. */
5237 if (!allows_mem)
5239 tree op = TREE_VALUE (link);
5240 if (! is_gimple_val (op)
5241 && is_gimple_reg_type (TREE_TYPE (op))
5242 && is_gimple_reg (get_base_address (op)))
5244 tree tem = create_tmp_reg (TREE_TYPE (op));
5245 tree ass;
5246 if (is_inout)
/* For in/out operands copy the current value into the temporary
   before the asm, and copy the temporary back afterwards.  */
5248 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
5249 tem, unshare_expr (op));
5250 gimplify_and_add (ass, pre_p);
5252 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
5253 gimplify_and_add (ass, post_p);
5255 TREE_VALUE (link) = tem;
5256 tret = GS_OK;
5260 vec_safe_push (outputs, link);
5261 TREE_CHAIN (link) = NULL_TREE;
5263 if (is_inout)
5265 /* An input/output operand. To give the optimizers more
5266 flexibility, split it into separate input and output
5267 operands. */
5268 tree input;
5269 char buf[10];
5271 /* Turn the in/out constraint into an output constraint. */
5272 char *p = xstrdup (constraint);
5273 p[0] = '=';
5274 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5276 /* And add a matching input constraint. */
5277 if (allows_reg)
5279 sprintf (buf, "%d", i);
5281 /* If there are multiple alternatives in the constraint,
5282 handle each of them individually. Those that allow register
5283 will be replaced with operand number, the others will stay
5284 unchanged. */
5285 if (strchr (p, ',') != NULL)
/* First scan: compute the worst-case length of the rewritten
   constraint string, then build it alternative by alternative.  */
5287 size_t len = 0, buflen = strlen (buf);
5288 char *beg, *end, *str, *dst;
5290 for (beg = p + 1;;)
5292 end = strchr (beg, ',');
5293 if (end == NULL)
5294 end = strchr (beg, '\0');
5295 if ((size_t) (end - beg) < buflen)
5296 len += buflen + 1;
5297 else
5298 len += end - beg + 1;
5299 if (*end)
5300 beg = end + 1;
5301 else
5302 break;
5305 str = (char *) alloca (len);
5306 for (beg = p + 1, dst = str;;)
5308 const char *tem;
5309 bool mem_p, reg_p, inout_p;
5311 end = strchr (beg, ',');
5312 if (end)
5313 *end = '\0';
/* Temporarily prepend '=' so parse_output_constraint accepts
   the single alternative in isolation.  */
5314 beg[-1] = '=';
5315 tem = beg - 1;
5316 parse_output_constraint (&tem, i, 0, 0,
5317 &mem_p, &reg_p, &inout_p);
5318 if (dst != str)
5319 *dst++ = ',';
5320 if (reg_p)
5322 memcpy (dst, buf, buflen);
5323 dst += buflen;
5325 else
5327 if (end)
5328 len = end - beg;
5329 else
5330 len = strlen (beg);
5331 memcpy (dst, beg, len);
5332 dst += len;
5334 if (end)
5335 beg = end + 1;
5336 else
5337 break;
5339 *dst = '\0';
5340 input = build_string (dst - str, str);
5342 else
5343 input = build_string (strlen (buf), buf);
5345 else
5346 input = build_string (constraint_len - 1, constraint + 1);
5348 free (p);
5350 input = build_tree_list (build_tree_list (NULL_TREE, input),
5351 unshare_expr (TREE_VALUE (link)));
5352 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
/* Pass 2: inputs (including the matched inputs appended above).
   Memory inputs must be addressable lvalues; the rest are gimplified
   to plain rvalues.  */
5356 link_next = NULL_TREE;
5357 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5359 link_next = TREE_CHAIN (link);
5360 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5361 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5362 oconstraints, &allows_mem, &allows_reg);
5364 /* If we can't make copies, we can only accept memory. */
5365 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5367 if (allows_mem)
5368 allows_reg = 0;
5369 else
5371 error ("impossible constraint in %<asm%>");
5372 error ("non-memory input %d must stay in memory", i);
5373 return GS_ERROR;
5377 /* If the operand is a memory input, it should be an lvalue. */
5378 if (!allows_reg && allows_mem)
5380 tree inputv = TREE_VALUE (link);
5381 STRIP_NOPS (inputv);
5382 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5383 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5384 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5385 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5386 || TREE_CODE (inputv) == MODIFY_EXPR)
5387 TREE_VALUE (link) = error_mark_node;
5388 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5389 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5390 if (tret != GS_ERROR)
5392 /* Unlike output operands, memory inputs are not guaranteed
5393 to be lvalues by the FE, and while the expressions are
5394 marked addressable there, if it is e.g. a statement
5395 expression, temporaries in it might not end up being
5396 addressable. They might be already used in the IL and thus
5397 it is too late to make them addressable now though. */
5398 tree x = TREE_VALUE (link);
5399 while (handled_component_p (x))
5400 x = TREE_OPERAND (x, 0);
5401 if (TREE_CODE (x) == MEM_REF
5402 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
5403 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
5404 if ((TREE_CODE (x) == VAR_DECL
5405 || TREE_CODE (x) == PARM_DECL
5406 || TREE_CODE (x) == RESULT_DECL)
5407 && !TREE_ADDRESSABLE (x)
5408 && is_gimple_reg (x))
5410 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
5411 input_location), 0,
5412 "memory input %d is not directly addressable",
5414 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
5417 mark_addressable (TREE_VALUE (link));
5418 if (tret == GS_ERROR)
5420 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
5421 "memory input %d is not directly addressable", i);
5422 ret = tret;
5425 else
5427 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5428 is_gimple_asm_val, fb_rvalue);
5429 if (tret == GS_ERROR)
5430 ret = tret;
5433 TREE_CHAIN (link) = NULL_TREE;
5434 vec_safe_push (inputs, link);
/* Passes 3 and 4: clobbers and goto-labels need no gimplification;
   they are simply detached from their TREE_CHAINs and collected.  */
5437 link_next = NULL_TREE;
5438 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5440 link_next = TREE_CHAIN (link);
5441 TREE_CHAIN (link) = NULL_TREE;
5442 vec_safe_push (clobbers, link);
5445 link_next = NULL_TREE;
5446 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5448 link_next = TREE_CHAIN (link);
5449 TREE_CHAIN (link) = NULL_TREE;
5450 vec_safe_push (labels, link);
5453 /* Do not add ASMs with errors to the gimple IL stream. */
5454 if (ret != GS_ERROR)
5456 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5457 inputs, outputs, clobbers, labels);
/* An asm without outputs is treated as volatile so it is not deleted.  */
5459 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5460 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5462 gimplify_seq_add_stmt (pre_p, stmt);
5465 return ret;
5468 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5469 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5470 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5471 return to this function.
5473 FIXME should we complexify the prequeue handling instead? Or use flags
5474 for all the cleanups and let the optimizer tighten them up? The current
5475 code seems pretty fragile; it will break on a cleanup within any
5476 non-conditional nesting. But any such nesting would be broken, anyway;
5477 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5478 and continues out of it. We can do that at the RTL level, though, so
5479 having an optimizer to tighten up try/finally regions would be a Good
5480 Thing. */
5482 static enum gimplify_status
5483 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5485 gimple_stmt_iterator iter;
5486 gimple_seq body_sequence = NULL;
5488 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5490 /* We only care about the number of conditions between the innermost
5491 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5492 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5493 int old_conds = gimplify_ctxp->conditions;
5494 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5495 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5496 gimplify_ctxp->conditions = 0;
5497 gimplify_ctxp->conditional_cleanups = NULL;
5498 gimplify_ctxp->in_cleanup_point_expr = true;
/* Gimplify the guarded expression into a private sequence, then restore
   the saved cleanup-tracking state.  */
5500 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5502 gimplify_ctxp->conditions = old_conds;
5503 gimplify_ctxp->conditional_cleanups = old_cleanups;
5504 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
/* Walk the gimplified body; every GIMPLE_WITH_CLEANUP_EXPR marker is
   converted into a GIMPLE_TRY whose eval part is everything after it.  */
5506 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5508 gimple *wce = gsi_stmt (iter);
5510 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5512 if (gsi_one_before_end_p (iter))
/* The WCE is the last statement: nothing can throw after it, so
   just emit the cleanup inline (unless it is EH-only) and drop
   the marker.  */
5514 /* Note that gsi_insert_seq_before and gsi_remove do not
5515 scan operands, unlike some other sequence mutators. */
5516 if (!gimple_wce_cleanup_eh_only (wce))
5517 gsi_insert_seq_before_without_update (&iter,
5518 gimple_wce_cleanup (wce),
5519 GSI_SAME_STMT);
5520 gsi_remove (&iter, true);
5521 break;
5523 else
5525 gtry *gtry;
5526 gimple_seq seq;
5527 enum gimple_try_flags kind;
5529 if (gimple_wce_cleanup_eh_only (wce))
5530 kind = GIMPLE_TRY_CATCH;
5531 else
5532 kind = GIMPLE_TRY_FINALLY;
5533 seq = gsi_split_seq_after (iter);
5535 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5536 /* Do not use gsi_replace here, as it may scan operands.
5537 We want to do a simple structural modification only. */
5538 gsi_set_stmt (&iter, gtry);
5539 iter = gsi_start (gtry->eval);
5542 else
5543 gsi_next (&iter);
5546 gimplify_seq_add_seq (pre_p, body_sequence);
/* TEMP is non-null when voidify_wrapper_expr created a temporary to hold
   the value of the cleanup point expression.  */
5547 if (temp)
5549 *expr_p = temp;
5550 return GS_OK;
5552 else
5554 *expr_p = NULL;
5555 return GS_ALL_DONE;
5559 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5560 is the cleanup action required. EH_ONLY is true if the cleanup should
5561 only be executed if an exception is thrown, not on normal exit. */
5563 static void
5564 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5566 gimple *wce;
5567 gimple_seq cleanup_stmts = NULL;
5569 /* Errors can result in improperly nested cleanups. Which results in
5570 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5571 if (seen_error ())
5572 return;
5574 if (gimple_conditional_context ())
5576 /* If we're in a conditional context, this is more complex. We only
5577 want to run the cleanup if we actually ran the initialization that
5578 necessitates it, but we want to run it after the end of the
5579 conditional context. So we wrap the try/finally around the
5580 condition and use a flag to determine whether or not to actually
5581 run the destructor. Thus
5583 test ? f(A()) : 0
5585 becomes (approximately)
5587 flag = 0;
5588 try {
5589 if (test) { A::A(temp); flag = 1; val = f(temp); }
5590 else { val = 0; }
5591 } finally {
5592 if (flag) A::~A(temp);
/* FLAG is set on the path that actually constructed the temporary;
   the cleanup is guarded by it so the destructor only runs then.  */
5596 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5597 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5598 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5600 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5601 gimplify_stmt (&cleanup, &cleanup_stmts);
5602 wce = gimple_build_wce (cleanup_stmts);
/* FLAG = false and the guarded WCE go to the enclosing conditional
   context; FLAG = true is emitted here, on the taken path.  */
5604 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5605 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5606 gimplify_seq_add_stmt (pre_p, ftrue);
5608 /* Because of this manipulation, and the EH edges that jump
5609 threading cannot redirect, the temporary (VAR) will appear
5610 to be used uninitialized. Don't warn. */
5611 TREE_NO_WARNING (var) = 1;
5613 else
/* Unconditional context: emit the cleanup marker directly.  */
5615 gimplify_stmt (&cleanup, &cleanup_stmts);
5616 wce = gimple_build_wce (cleanup_stmts);
5617 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5618 gimplify_seq_add_stmt (pre_p, wce);
5622 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5624 static enum gimplify_status
5625 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5627 tree targ = *expr_p;
5628 tree temp = TARGET_EXPR_SLOT (targ);
5629 tree init = TARGET_EXPR_INITIAL (targ);
5630 enum gimplify_status ret;
/* INIT is non-null only the first time this TARGET_EXPR is gimplified;
   it is cleared at the bottom so re-expansion is a no-op.  */
5632 if (init)
5634 tree cleanup = NULL_TREE;
5636 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5637 to the temps list. Handle also variable length TARGET_EXPRs. */
5638 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5640 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5641 gimplify_type_sizes (TREE_TYPE (temp), pre_p)
5642 gimplify_vla_decl (temp, pre_p);
5644 else
5645 gimple_add_tmp_var (temp);
5647 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5648 expression is supposed to initialize the slot. */
5649 if (VOID_TYPE_P (TREE_TYPE (init)))
5650 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5651 else
/* Wrap the initializer in TEMP = INIT and gimplify that; the
   INIT_EXPR node itself can be freed afterwards.  */
5653 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5654 init = init_expr;
5655 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5656 init = NULL;
5657 ggc_free (init_expr);
5659 if (ret == GS_ERROR)
5661 /* PR c++/28266 Make sure this is expanded only once. */
5662 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5663 return GS_ERROR;
5665 if (init)
5666 gimplify_and_add (init, pre_p);
5668 /* If needed, push the cleanup for the temp. */
5669 if (TARGET_EXPR_CLEANUP (targ))
5671 if (CLEANUP_EH_ONLY (targ))
5672 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5673 CLEANUP_EH_ONLY (targ), pre_p);
5674 else
5675 cleanup = TARGET_EXPR_CLEANUP (targ);
5678 /* Add a clobber for the temporary going out of scope, like
5679 gimplify_bind_expr. */
5680 if (gimplify_ctxp->in_cleanup_point_expr
5681 && needs_to_live_in_memory (temp)
5682 && flag_stack_reuse == SR_ALL)
/* An empty volatile CONSTRUCTOR assigned to TEMP marks its
   lifetime end, enabling stack slot reuse.  */
5684 tree clobber = build_constructor (TREE_TYPE (temp),
5685 NULL);
5686 TREE_THIS_VOLATILE (clobber) = true;
5687 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5688 if (cleanup)
5689 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5690 clobber);
5691 else
5692 cleanup = clobber;
5695 if (cleanup)
5696 gimple_push_cleanup (temp, cleanup, false, pre_p);
5698 /* Only expand this once. */
5699 TREE_OPERAND (targ, 3) = init;
5700 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5702 else
5703 /* We should have expanded this before. */
5704 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5706 *expr_p = temp;
5707 return GS_OK;
5710 /* Gimplification of expression trees. */
5712 /* Gimplify an expression which appears at statement context. The
5713 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5714 NULL, a new sequence is allocated.
5716 Return true if we actually added a statement to the queue. */
5718 bool
5719 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5721 gimple_seq_node last;
5723 last = gimple_seq_last (*seq_p);
5724 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5725 return last != gimple_seq_last (*seq_p);
5728 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5729 to CTX. If entries already exist, force them to be some flavor of private.
5730 If there is no enclosing parallel, do nothing. */
5732 void
5733 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5735 splay_tree_node n;
5737 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5738 return;
/* Loop body executed for CTX and each enclosing context; terminates at
   the "while (ctx);" below once the outermost context is processed
   (do-while — the "do" line is absent from this extraction).  */
5742 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5743 if (n != NULL)
/* DECL already has an entry: demote SHARED to FIRSTPRIVATE, restrict
   MAP entries to to-only, and stop walking outward otherwise.  */
5745 if (n->value & GOVD_SHARED)
5746 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5747 else if (n->value & GOVD_MAP)
5748 n->value |= GOVD_MAP_TO_ONLY;
5749 else
5750 return;
5752 else if ((ctx->region_type & ORT_TARGET) != 0)
5754 if (ctx->target_map_scalars_firstprivate)
5755 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5756 else
5757 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5759 else if (ctx->region_type != ORT_WORKSHARE
5760 && ctx->region_type != ORT_SIMD
5761 && ctx->region_type != ORT_ACC
5762 && !(ctx->region_type & ORT_TARGET_DATA))
5763 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5765 ctx = ctx->outer_context;
5767 while (ctx);
5770 /* Similarly for each of the type sizes of TYPE. */
5772 static void
5773 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5775 if (type == NULL || type == error_mark_node)
5776 return;
5777 type = TYPE_MAIN_VARIANT (type);
/* privatized_types records types already handled, so recursive types
   and repeated uses are processed only once per context.  */
5779 if (ctx->privatized_types->add (type))
5780 return;
/* Recurse according to the shape of the type, firstprivatizing every
   variable-sized bound, field offset or element type we encounter.  */
5782 switch (TREE_CODE (type))
5784 case INTEGER_TYPE:
5785 case ENUMERAL_TYPE:
5786 case BOOLEAN_TYPE:
5787 case REAL_TYPE:
5788 case FIXED_POINT_TYPE:
5789 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5790 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5791 break;
5793 case ARRAY_TYPE:
5794 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5795 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5796 break;
5798 case RECORD_TYPE:
5799 case UNION_TYPE:
5800 case QUAL_UNION_TYPE:
5802 tree field;
5803 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5804 if (TREE_CODE (field) == FIELD_DECL)
5806 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5807 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5810 break;
5812 case POINTER_TYPE:
5813 case REFERENCE_TYPE:
5814 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5815 break;
5817 default:
5818 break;
/* Finally handle the size of TYPE itself, plus any language-specific
   type size fields via the langhook.  */
5821 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5822 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5823 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5826 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5828 static void
5829 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5831 splay_tree_node n;
5832 unsigned int nflags;
5833 tree t;
5835 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
5836 return;
5838 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5839 there are constructors involved somewhere. */
5840 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5841 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5842 flags |= GOVD_SEEN;
5844 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5845 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
5847 /* We shouldn't be re-adding the decl with the same data
5848 sharing class. */
5849 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5850 nflags = n->value | flags;
5851 /* The only combination of data sharing classes we should see is
5852 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5853 reduction variables to be used in data sharing clauses. */
5854 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5855 || ((nflags & GOVD_DATA_SHARE_CLASS)
5856 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
5857 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5858 n->value = nflags;
5859 return;
5862 /* When adding a variable-sized variable, we have to handle all sorts
5863 of additional bits of data: the pointer replacement variable, and
5864 the parameters of the type. */
5865 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5867 /* Add the pointer replacement variable as PRIVATE if the variable
5868 replacement is private, else FIRSTPRIVATE since we'll need the
5869 address of the original variable either for SHARED, or for the
5870 copy into or out of the context. */
5871 if (!(flags & GOVD_LOCAL))
5873 if (flags & GOVD_MAP)
5874 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5875 else if (flags & GOVD_PRIVATE)
5876 nflags = GOVD_PRIVATE;
5877 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5878 && (flags & GOVD_FIRSTPRIVATE))
5879 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
5880 else
5881 nflags = GOVD_FIRSTPRIVATE;
5882 nflags |= flags & GOVD_SEEN;
/* The VLA decl's DECL_VALUE_EXPR is *ptr; recurse on the pointer
   replacement variable itself.  */
5883 t = DECL_VALUE_EXPR (decl);
5884 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5885 t = TREE_OPERAND (t, 0);
5886 gcc_assert (DECL_P (t));
5887 omp_add_variable (ctx, t, nflags);
5890 /* Add all of the variable and type parameters (which should have
5891 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5892 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5893 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5894 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5896 /* The variable-sized variable itself is never SHARED, only some form
5897 of PRIVATE. The sharing would take place via the pointer variable
5898 which we remapped above. */
5899 if (flags & GOVD_SHARED)
5900 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5901 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5903 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5904 alloca statement we generate for the variable, so make sure it
5905 is available. This isn't automatically needed for the SHARED
5906 case, since we won't be allocating local storage then.
5907 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5908 in this case omp_notice_variable will be called later
5909 on when it is gimplified. */
5910 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5911 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5912 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5914 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5915 && lang_hooks.decls.omp_privatize_by_reference (decl))
5917 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5919 /* Similar to the direct variable sized case above, we'll need the
5920 size of references being privatized. */
5921 if ((flags & GOVD_SHARED) == 0)
5923 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5924 if (DECL_P (t))
5925 omp_notice_variable (ctx, t, true);
/* Record the (possibly adjusted) FLAGS for DECL in this context.  */
5929 if (n != NULL)
5930 n->value |= flags;
5931 else
5932 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5935 /* Notice a threadprivate variable DECL used in OMP context CTX.
5936 This just prints out diagnostics about threadprivate variable uses
5937 in untied tasks. If DECL2 is non-NULL, prevent this warning
5938 on that variable. */
5940 static bool
5941 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5942 tree decl2)
5944 splay_tree_node n;
5945 struct gimplify_omp_ctx *octx;
/* First diagnose uses of threadprivate variables inside any enclosing
   target region; inserting DECL (and DECL2) into the context's splay
   tree ensures the error is emitted only once per variable.  */
5947 for (octx = ctx; octx; octx = octx->outer_context)
5948 if ((octx->region_type & ORT_TARGET) != 0)
5950 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5951 if (n == NULL)
5953 error ("threadprivate variable %qE used in target region",
5954 DECL_NAME (decl));
5955 error_at (octx->location, "enclosing target region");
5956 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5958 if (decl2)
5959 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
/* Then diagnose uses in untied tasks, with the same de-duplication.  */
5962 if (ctx->region_type != ORT_UNTIED_TASK)
5963 return false;
5964 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5965 if (n == NULL)
5967 error ("threadprivate variable %qE used in untied task",
5968 DECL_NAME (decl));
5969 error_at (ctx->location, "enclosing task");
5970 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5972 if (decl2)
5973 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5974 return false;
5977 /* Return true if global var DECL is device resident. */
5979 static bool
5980 device_resident_p (tree decl)
5982 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
5984 if (!attr)
5985 return false;
5987 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
5989 tree c = TREE_VALUE (t);
5990 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
5991 return true;
5994 return false;
5997 /* Determine outer default flags for DECL mentioned in an OMP region
5998 but not declared in an enclosing clause.
6000 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6001 remapped firstprivate instead of shared. To some extent this is
6002 addressed in omp_firstprivatize_type_sizes, but not
6003 effectively. */
6005 static unsigned
6006 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6007 bool in_code, unsigned flags)
6009 enum omp_clause_default_kind default_kind = ctx->default_kind;
6010 enum omp_clause_default_kind kind;
/* The language frontend may predetermine the sharing of DECL; that
   overrides the context's default clause.  */
6012 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6013 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6014 default_kind = kind;
6016 switch (default_kind)
6018 case OMP_CLAUSE_DEFAULT_NONE:
/* default(none): a use of an unlisted variable is an error; report
   which construct kind encloses it, then fall through to SHARED so
   compilation can continue.  */
6020 const char *rtype;
6022 if (ctx->region_type & ORT_PARALLEL)
6023 rtype = "parallel";
6024 else if (ctx->region_type & ORT_TASK)
6025 rtype = "task";
6026 else if (ctx->region_type & ORT_TEAMS)
6027 rtype = "teams";
6028 else
6029 gcc_unreachable ();
6031 error ("%qE not specified in enclosing %s",
6032 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6033 error_at (ctx->location, "enclosing %s", rtype);
6035 /* FALLTHRU */
6036 case OMP_CLAUSE_DEFAULT_SHARED:
6037 flags |= GOVD_SHARED;
6038 break;
6039 case OMP_CLAUSE_DEFAULT_PRIVATE:
6040 flags |= GOVD_PRIVATE;
6041 break;
6042 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6043 flags |= GOVD_FIRSTPRIVATE;
6044 break;
6045 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6046 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6047 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6048 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
/* Walk outward looking for an existing sharing decision for DECL;
   non-shared outer use makes it firstprivate here, an enclosing
   parallel/teams region makes it shared.  */
6050 omp_notice_variable (octx, decl, in_code);
6051 for (; octx; octx = octx->outer_context)
6053 splay_tree_node n2;
6055 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6056 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6057 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6058 continue;
6059 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6061 flags |= GOVD_FIRSTPRIVATE;
6062 goto found_outer;
6064 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6066 flags |= GOVD_SHARED;
6067 goto found_outer;
/* No outer decision found: locals and PARM_DECLs default to
   firstprivate in a task, globals to shared.  */
6072 if (TREE_CODE (decl) == PARM_DECL
6073 || (!is_global_var (decl)
6074 && DECL_CONTEXT (decl) == current_function_decl))
6075 flags |= GOVD_FIRSTPRIVATE;
6076 else
6077 flags |= GOVD_SHARED;
6078 found_outer:
6079 break;
6081 default:
6082 gcc_unreachable ();
6085 return flags;
6089 /* Determine outer default flags for DECL mentioned in an OACC region
6090 but not declared in an enclosing clause. */
6092 static unsigned
6093 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6095 const char *rkind;
6096 bool on_device = false;
6097 tree type = TREE_TYPE (decl);
/* For by-reference privatization, classify based on the referenced
   type rather than the reference itself.  */
6099 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6100 type = TREE_TYPE (type);
6102 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6103 && is_global_var (decl)
6104 && device_resident_p (decl))
6106 on_device = true;
6107 flags |= GOVD_MAP_TO_ONLY;
6110 switch (ctx->region_type)
6112 default:
6113 gcc_unreachable ();
6115 case ORT_ACC_KERNELS:
6116 /* Scalars are default 'copy' under kernels, non-scalars are default
6117 'present_or_copy'. */
6118 flags |= GOVD_MAP;
6119 if (!AGGREGATE_TYPE_P (type))
6120 flags |= GOVD_MAP_FORCE;
6122 rkind = "kernels";
6123 break;
6125 case ORT_ACC_PARALLEL:
6127 if (on_device || AGGREGATE_TYPE_P (type))
6128 /* Aggregates default to 'present_or_copy'. */
6129 flags |= GOVD_MAP;
6130 else
6131 /* Scalars default to 'firstprivate'. */
6132 flags |= GOVD_FIRSTPRIVATE;
6133 rkind = "parallel";
6135 break;
/* Under default(none), an implicitly referenced user variable is an
   error; compiler-generated decls are exempt.  */
6138 if (DECL_ARTIFICIAL (decl))
6139 ; /* We can get compiler-generated decls, and should not complain
6140 about them. */
6141 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6143 error ("%qE not specified in enclosing OpenACC %qs construct",
6144 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6145 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6147 else
6148 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6150 return flags;
6153 /* Record the fact that DECL was used within the OMP context CTX.
6154 IN_CODE is true when real code uses DECL, and false when we should
6155 merely emit default(none) errors. Return true if DECL is going to
6156 be remapped and thus DECL shouldn't be gimplified into its
6157 DECL_VALUE_EXPR (if any). */
6159 static bool
6160 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
/* NOTE(review): this is a line-numbered blob rendering; brace-only lines
   were dropped by the extraction, so block nesting must be read from the
   original gimplify.c, not from the layout shown here.  */
6162 splay_tree_node n;
6163 unsigned flags = in_code ? GOVD_SEEN : 0;
6164 bool ret = false, shared;
6166 if (error_operand_p (decl))
6167 return false;
6169 if (ctx->region_type == ORT_NONE)
6170 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
/* Globals: threadprivate variables (directly or via their value expr)
   are predetermined; also diagnose misuse inside OpenACC routine()
   functions before any default-scoping below.  */
6172 if (is_global_var (decl))
6174 /* Threadprivate variables are predetermined. */
6175 if (DECL_THREAD_LOCAL_P (decl))
6176 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6178 if (DECL_HAS_VALUE_EXPR_P (decl))
6180 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6182 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6183 return omp_notice_threadprivate_variable (ctx, decl, value);
6186 if (gimplify_omp_ctxp->outer_context == NULL
6187 && VAR_P (decl)
6188 && get_oacc_fn_attrib (current_function_decl))
6190 location_t loc = DECL_SOURCE_LOCATION (decl);
6192 if (lookup_attribute ("omp declare target link",
6193 DECL_ATTRIBUTES (decl)))
6195 error_at (loc,
6196 "%qE with %<link%> clause used in %<routine%> function",
6197 DECL_NAME (decl));
6198 return false;
6200 else if (!lookup_attribute ("omp declare target",
6201 DECL_ATTRIBUTES (decl)))
6203 error_at (loc,
6204 "%qE requires a %<declare%> directive for use "
6205 "in a %<routine%> function", DECL_NAME (decl));
6206 return false;
/* Target regions: a variable with no explicit clause gets an implicit
   mapping / firstprivate attribute computed into NFLAGS below.  */
6211 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6212 if ((ctx->region_type & ORT_TARGET) != 0)
6214 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6215 if (n == NULL)
6217 unsigned nflags = flags;
6218 if (ctx->target_map_pointers_as_0len_arrays
6219 || ctx->target_map_scalars_firstprivate)
6221 bool is_declare_target = false;
6222 bool is_scalar = false;
6223 if (is_global_var (decl)
6224 && varpool_node::get_create (decl)->offloadable)
6226 struct gimplify_omp_ctx *octx;
6227 for (octx = ctx->outer_context;
6228 octx; octx = octx->outer_context)
6230 n = splay_tree_lookup (octx->variables,
6231 (splay_tree_key)decl);
6232 if (n
6233 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6234 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6235 break;
/* Only an offloadable global with no overriding data-sharing clause in
   any enclosing context counts as "declare target" here.  */
6237 is_declare_target = octx == NULL;
6239 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6241 tree type = TREE_TYPE (decl);
6242 if (TREE_CODE (type) == REFERENCE_TYPE)
6243 type = TREE_TYPE (type);
6244 if (TREE_CODE (type) == COMPLEX_TYPE)
6245 type = TREE_TYPE (type);
6246 if (INTEGRAL_TYPE_P (type)
6247 || SCALAR_FLOAT_TYPE_P (type)
6248 || TREE_CODE (type) == POINTER_TYPE)
6249 is_scalar = true;
6251 if (is_declare_target)
6253 else if (ctx->target_map_pointers_as_0len_arrays
6254 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6255 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6256 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6257 == POINTER_TYPE)))
6258 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6259 else if (is_scalar)
6260 nflags |= GOVD_FIRSTPRIVATE;
/* For OpenACC, consult enclosing data constructs: a data clause there
   determines the implicit mapping of this reference.  */
6263 struct gimplify_omp_ctx *octx = ctx->outer_context;
6264 if ((ctx->region_type & ORT_ACC) && octx)
6266 /* Look in outer OpenACC contexts, to see if there's a
6267 data attribute for this variable. */
6268 omp_notice_variable (octx, decl, in_code);
6270 for (; octx; octx = octx->outer_context)
6272 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6273 break;
6274 splay_tree_node n2
6275 = splay_tree_lookup (octx->variables,
6276 (splay_tree_key) decl);
6277 if (n2)
6279 if (octx->region_type == ORT_ACC_HOST_DATA)
6280 error ("variable %qE declared in enclosing "
6281 "%<host_data%> region", DECL_NAME (decl));
6282 nflags |= GOVD_MAP;
6283 if (octx->region_type == ORT_ACC_DATA
6284 && (n2->value & GOVD_MAP_0LEN_ARRAY))
6285 nflags |= GOVD_MAP_0LEN_ARRAY;
6286 goto found_outer;
/* NFLAGS == FLAGS means no implicit attribute was chosen above; then
   verify mappability and fall back to the default clause (OpenACC) or
   a plain map (OpenMP target).  */
6292 tree type = TREE_TYPE (decl);
6294 if (nflags == flags
6295 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6296 && lang_hooks.decls.omp_privatize_by_reference (decl))
6297 type = TREE_TYPE (type);
6298 if (nflags == flags
6299 && !lang_hooks.types.omp_mappable_type (type))
6301 error ("%qD referenced in target region does not have "
6302 "a mappable type", decl);
6303 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6305 else if (nflags == flags)
6307 if ((ctx->region_type & ORT_ACC) != 0)
6308 nflags = oacc_default_clause (ctx, decl, flags);
6309 else
6310 nflags |= GOVD_MAP;
6313 found_outer:
6314 omp_add_variable (ctx, decl, nflags);
6316 else
6318 /* If nothing changed, there's nothing left to do. */
6319 if ((n->value & flags) == flags)
6320 return ret;
6321 flags |= n->value;
6322 n->value = flags;
6324 goto do_outer;
/* Non-target region, variable not yet recorded: worksharing-ish regions
   defer to their enclosing context; otherwise apply the default clause.  */
6327 if (n == NULL)
6329 if (ctx->region_type == ORT_WORKSHARE
6330 || ctx->region_type == ORT_SIMD
6331 || ctx->region_type == ORT_ACC
6332 || (ctx->region_type & ORT_TARGET_DATA) != 0)
6333 goto do_outer;
6335 flags = omp_default_clause (ctx, decl, in_code, flags);
6337 if ((flags & GOVD_PRIVATE)
6338 && lang_hooks.decls.omp_private_outer_ref (decl))
6339 flags |= GOVD_PRIVATE_OUTER_REF;
6341 omp_add_variable (ctx, decl, flags);
6343 shared = (flags & GOVD_SHARED) != 0;
6344 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6345 goto do_outer;
/* First real (GOVD_SEEN) use of a variable previously only
   default-scoped: also mark any variable-sized size expressions it
   depends on as seen, so they get remapped too.  */
6348 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6349 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6350 && DECL_SIZE (decl))
6352 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6354 splay_tree_node n2;
6355 tree t = DECL_VALUE_EXPR (decl);
6356 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6357 t = TREE_OPERAND (t, 0);
6358 gcc_assert (DECL_P (t));
6359 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6360 n2->value |= GOVD_SEEN;
6362 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
6363 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
6364 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
6365 != INTEGER_CST))
6367 splay_tree_node n2;
6368 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6369 gcc_assert (DECL_P (t));
6370 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6371 if (n2)
6372 n2->value |= GOVD_SEEN;
6376 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6377 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6379 /* If nothing changed, there's nothing left to do. */
6380 if ((n->value & flags) == flags)
6381 return ret;
6382 flags |= n->value;
6383 n->value = flags;
6385 do_outer:
6386 /* If the variable is private in the current context, then we don't
6387 need to propagate anything to an outer context. */
6388 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6389 return ret;
6390 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6391 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6392 return ret;
6393 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6394 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6395 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6396 return ret;
/* Otherwise recurse outward; a true result from any enclosing context
   forces remapping here as well.  */
6397 if (ctx->outer_context
6398 && omp_notice_variable (ctx->outer_context, decl, in_code))
6399 return true;
6400 return ret;
6403 /* Verify that DECL is private within CTX. If there's specific information
6404 to the contrary in the innermost scope, generate an error. */
6406 static bool
6407 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
6409 splay_tree_node n;
6411 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6412 if (n != NULL)
6414 if (n->value & GOVD_SHARED)
6416 if (ctx == gimplify_omp_ctxp)
6418 if (simd)
6419 error ("iteration variable %qE is predetermined linear",
6420 DECL_NAME (decl));
6421 else
6422 error ("iteration variable %qE should be private",
6423 DECL_NAME (decl));
6424 n->value = GOVD_PRIVATE;
6425 return true;
6427 else
6428 return false;
6430 else if ((n->value & GOVD_EXPLICIT) != 0
6431 && (ctx == gimplify_omp_ctxp
6432 || (ctx->region_type == ORT_COMBINED_PARALLEL
6433 && gimplify_omp_ctxp->outer_context == ctx)))
6435 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6436 error ("iteration variable %qE should not be firstprivate",
6437 DECL_NAME (decl));
6438 else if ((n->value & GOVD_REDUCTION) != 0)
6439 error ("iteration variable %qE should not be reduction",
6440 DECL_NAME (decl));
6441 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6442 error ("iteration variable %qE should not be linear",
6443 DECL_NAME (decl));
6444 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6445 error ("iteration variable %qE should not be lastprivate",
6446 DECL_NAME (decl));
6447 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6448 error ("iteration variable %qE should not be private",
6449 DECL_NAME (decl));
6450 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6451 error ("iteration variable %qE is predetermined linear",
6452 DECL_NAME (decl));
6454 return (ctx == gimplify_omp_ctxp
6455 || (ctx->region_type == ORT_COMBINED_PARALLEL
6456 && gimplify_omp_ctxp->outer_context == ctx));
6459 if (ctx->region_type != ORT_WORKSHARE
6460 && ctx->region_type != ORT_SIMD
6461 && ctx->region_type != ORT_ACC)
6462 return false;
6463 else if (ctx->outer_context)
6464 return omp_is_private (ctx->outer_context, decl, simd);
6465 return false;
6468 /* Return true if DECL is private within a parallel region
6469 that binds to the current construct's context or in parallel
6470 region's REDUCTION clause. */
6472 static bool
6473 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6475 splay_tree_node n;
6479 ctx = ctx->outer_context;
6480 if (ctx == NULL)
6482 if (is_global_var (decl))
6483 return false;
6485 /* References might be private, but might be shared too,
6486 when checking for copyprivate, assume they might be
6487 private, otherwise assume they might be shared. */
6488 if (copyprivate)
6489 return true;
6491 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6492 return false;
6494 /* Treat C++ privatized non-static data members outside
6495 of the privatization the same. */
6496 if (omp_member_access_dummy_var (decl))
6497 return false;
6499 return true;
6502 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6504 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6505 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6506 continue;
6508 if (n != NULL)
6510 if ((n->value & GOVD_LOCAL) != 0
6511 && omp_member_access_dummy_var (decl))
6512 return false;
6513 return (n->value & GOVD_SHARED) == 0;
6516 while (ctx->region_type == ORT_WORKSHARE
6517 || ctx->region_type == ORT_SIMD
6518 || ctx->region_type == ORT_ACC);
6519 return false;
6522 /* Return true if the CTX is combined with distribute and thus
6523 lastprivate can't be supported. */
6525 static bool
6526 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6530 if (ctx->outer_context == NULL)
6531 return false;
6532 ctx = ctx->outer_context;
6533 switch (ctx->region_type)
6535 case ORT_WORKSHARE:
6536 if (!ctx->combined_loop)
6537 return false;
6538 if (ctx->distribute)
6539 return lang_GNU_Fortran ();
6540 break;
6541 case ORT_COMBINED_PARALLEL:
6542 break;
6543 case ORT_COMBINED_TEAMS:
6544 return lang_GNU_Fortran ();
6545 default:
6546 return false;
6549 while (1);
6552 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
6554 static tree
6555 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6557 tree t = *tp;
6559 /* If this node has been visited, unmark it and keep looking. */
6560 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6561 return t;
6563 if (IS_TYPE_OR_DECL_P (t))
6564 *walk_subtrees = 0;
6565 return NULL_TREE;
6568 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6569 and previous omp contexts. */
6571 static void
6572 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6573 enum omp_region_type region_type,
6574 enum tree_code code)
6576 struct gimplify_omp_ctx *ctx, *outer_ctx;
6577 tree c;
6578 hash_map<tree, tree> *struct_map_to_clause = NULL;
6579 tree *prev_list_p = NULL;
6581 ctx = new_omp_context (region_type);
6582 outer_ctx = ctx->outer_context;
6583 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6585 ctx->target_map_pointers_as_0len_arrays = true;
6586 /* FIXME: For Fortran we want to set this too, when
6587 the Fortran FE is updated to OpenMP 4.5. */
6588 ctx->target_map_scalars_firstprivate = true;
6590 if (!lang_GNU_Fortran ())
6591 switch (code)
6593 case OMP_TARGET:
6594 case OMP_TARGET_DATA:
6595 case OMP_TARGET_ENTER_DATA:
6596 case OMP_TARGET_EXIT_DATA:
6597 case OACC_HOST_DATA:
6598 ctx->target_firstprivatize_array_bases = true;
6599 default:
6600 break;
6603 while ((c = *list_p) != NULL)
6605 bool remove = false;
6606 bool notice_outer = true;
6607 const char *check_non_private = NULL;
6608 unsigned int flags;
6609 tree decl;
6611 switch (OMP_CLAUSE_CODE (c))
6613 case OMP_CLAUSE_PRIVATE:
6614 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6615 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6617 flags |= GOVD_PRIVATE_OUTER_REF;
6618 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6620 else
6621 notice_outer = false;
6622 goto do_add;
6623 case OMP_CLAUSE_SHARED:
6624 flags = GOVD_SHARED | GOVD_EXPLICIT;
6625 goto do_add;
6626 case OMP_CLAUSE_FIRSTPRIVATE:
6627 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6628 check_non_private = "firstprivate";
6629 goto do_add;
6630 case OMP_CLAUSE_LASTPRIVATE:
6631 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6632 check_non_private = "lastprivate";
6633 decl = OMP_CLAUSE_DECL (c);
6634 if (omp_no_lastprivate (ctx))
6636 notice_outer = false;
6637 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6639 else if (error_operand_p (decl))
6640 goto do_add;
6641 else if (outer_ctx
6642 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6643 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6644 && splay_tree_lookup (outer_ctx->variables,
6645 (splay_tree_key) decl) == NULL)
6647 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6648 if (outer_ctx->outer_context)
6649 omp_notice_variable (outer_ctx->outer_context, decl, true);
6651 else if (outer_ctx
6652 && (outer_ctx->region_type & ORT_TASK) != 0
6653 && outer_ctx->combined_loop
6654 && splay_tree_lookup (outer_ctx->variables,
6655 (splay_tree_key) decl) == NULL)
6657 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6658 if (outer_ctx->outer_context)
6659 omp_notice_variable (outer_ctx->outer_context, decl, true);
6661 else if (outer_ctx
6662 && (outer_ctx->region_type == ORT_WORKSHARE
6663 || outer_ctx->region_type == ORT_ACC)
6664 && outer_ctx->combined_loop
6665 && splay_tree_lookup (outer_ctx->variables,
6666 (splay_tree_key) decl) == NULL
6667 && !omp_check_private (outer_ctx, decl, false))
6669 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6670 if (outer_ctx->outer_context
6671 && (outer_ctx->outer_context->region_type
6672 == ORT_COMBINED_PARALLEL)
6673 && splay_tree_lookup (outer_ctx->outer_context->variables,
6674 (splay_tree_key) decl) == NULL)
6676 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6677 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6678 if (octx->outer_context)
6679 omp_notice_variable (octx->outer_context, decl, true);
6681 else if (outer_ctx->outer_context)
6682 omp_notice_variable (outer_ctx->outer_context, decl, true);
6684 goto do_add;
6685 case OMP_CLAUSE_REDUCTION:
6686 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6687 /* OpenACC permits reductions on private variables. */
6688 if (!(region_type & ORT_ACC))
6689 check_non_private = "reduction";
6690 decl = OMP_CLAUSE_DECL (c);
6691 if (TREE_CODE (decl) == MEM_REF)
6693 tree type = TREE_TYPE (decl);
6694 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6695 NULL, is_gimple_val, fb_rvalue, false)
6696 == GS_ERROR)
6698 remove = true;
6699 break;
6701 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6702 if (DECL_P (v))
6704 omp_firstprivatize_variable (ctx, v);
6705 omp_notice_variable (ctx, v, true);
6707 decl = TREE_OPERAND (decl, 0);
6708 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6710 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6711 NULL, is_gimple_val, fb_rvalue, false)
6712 == GS_ERROR)
6714 remove = true;
6715 break;
6717 v = TREE_OPERAND (decl, 1);
6718 if (DECL_P (v))
6720 omp_firstprivatize_variable (ctx, v);
6721 omp_notice_variable (ctx, v, true);
6723 decl = TREE_OPERAND (decl, 0);
6725 if (TREE_CODE (decl) == ADDR_EXPR
6726 || TREE_CODE (decl) == INDIRECT_REF)
6727 decl = TREE_OPERAND (decl, 0);
6729 goto do_add_decl;
6730 case OMP_CLAUSE_LINEAR:
6731 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6732 is_gimple_val, fb_rvalue) == GS_ERROR)
6734 remove = true;
6735 break;
6737 else
6739 if (code == OMP_SIMD
6740 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6742 struct gimplify_omp_ctx *octx = outer_ctx;
6743 if (octx
6744 && octx->region_type == ORT_WORKSHARE
6745 && octx->combined_loop
6746 && !octx->distribute)
6748 if (octx->outer_context
6749 && (octx->outer_context->region_type
6750 == ORT_COMBINED_PARALLEL))
6751 octx = octx->outer_context->outer_context;
6752 else
6753 octx = octx->outer_context;
6755 if (octx
6756 && octx->region_type == ORT_WORKSHARE
6757 && octx->combined_loop
6758 && octx->distribute
6759 && !lang_GNU_Fortran ())
6761 error_at (OMP_CLAUSE_LOCATION (c),
6762 "%<linear%> clause for variable other than "
6763 "loop iterator specified on construct "
6764 "combined with %<distribute%>");
6765 remove = true;
6766 break;
6769 /* For combined #pragma omp parallel for simd, need to put
6770 lastprivate and perhaps firstprivate too on the
6771 parallel. Similarly for #pragma omp for simd. */
6772 struct gimplify_omp_ctx *octx = outer_ctx;
6773 decl = NULL_TREE;
6774 if (omp_no_lastprivate (ctx))
6775 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6778 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6779 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6780 break;
6781 decl = OMP_CLAUSE_DECL (c);
6782 if (error_operand_p (decl))
6784 decl = NULL_TREE;
6785 break;
6787 flags = GOVD_SEEN;
6788 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6789 flags |= GOVD_FIRSTPRIVATE;
6790 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6791 flags |= GOVD_LASTPRIVATE;
6792 if (octx
6793 && octx->region_type == ORT_WORKSHARE
6794 && octx->combined_loop)
6796 if (octx->outer_context
6797 && (octx->outer_context->region_type
6798 == ORT_COMBINED_PARALLEL))
6799 octx = octx->outer_context;
6800 else if (omp_check_private (octx, decl, false))
6801 break;
6803 else if (octx
6804 && (octx->region_type & ORT_TASK) != 0
6805 && octx->combined_loop)
6807 else if (octx
6808 && octx->region_type == ORT_COMBINED_PARALLEL
6809 && ctx->region_type == ORT_WORKSHARE
6810 && octx == outer_ctx)
6811 flags = GOVD_SEEN | GOVD_SHARED;
6812 else if (octx
6813 && octx->region_type == ORT_COMBINED_TEAMS)
6814 flags = GOVD_SEEN | GOVD_SHARED;
6815 else if (octx
6816 && octx->region_type == ORT_COMBINED_TARGET)
6818 flags &= ~GOVD_LASTPRIVATE;
6819 if (flags == GOVD_SEEN)
6820 break;
6822 else
6823 break;
6824 splay_tree_node on
6825 = splay_tree_lookup (octx->variables,
6826 (splay_tree_key) decl);
6827 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6829 octx = NULL;
6830 break;
6832 omp_add_variable (octx, decl, flags);
6833 if (octx->outer_context == NULL)
6834 break;
6835 octx = octx->outer_context;
6837 while (1);
6838 if (octx
6839 && decl
6840 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6841 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6842 omp_notice_variable (octx, decl, true);
6844 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6845 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6846 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6848 notice_outer = false;
6849 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6851 goto do_add;
6853 case OMP_CLAUSE_MAP:
6854 decl = OMP_CLAUSE_DECL (c);
6855 if (error_operand_p (decl))
6856 remove = true;
6857 switch (code)
6859 case OMP_TARGET:
6860 break;
6861 case OACC_DATA:
6862 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
6863 break;
6864 case OMP_TARGET_DATA:
6865 case OMP_TARGET_ENTER_DATA:
6866 case OMP_TARGET_EXIT_DATA:
6867 case OACC_ENTER_DATA:
6868 case OACC_EXIT_DATA:
6869 case OACC_HOST_DATA:
6870 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6871 || (OMP_CLAUSE_MAP_KIND (c)
6872 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6873 /* For target {,enter ,exit }data only the array slice is
6874 mapped, but not the pointer to it. */
6875 remove = true;
6876 break;
6877 default:
6878 break;
6880 if (remove)
6881 break;
6882 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6884 struct gimplify_omp_ctx *octx;
6885 for (octx = outer_ctx; octx; octx = octx->outer_context)
6887 if (octx->region_type != ORT_ACC_HOST_DATA)
6888 break;
6889 splay_tree_node n2
6890 = splay_tree_lookup (octx->variables,
6891 (splay_tree_key) decl);
6892 if (n2)
6893 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6894 "declared in enclosing %<host_data%> region",
6895 DECL_NAME (decl));
6898 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6899 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6900 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6901 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6902 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6904 remove = true;
6905 break;
6907 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6908 || (OMP_CLAUSE_MAP_KIND (c)
6909 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6910 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6912 OMP_CLAUSE_SIZE (c)
6913 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
6914 false);
6915 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6916 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6918 if (!DECL_P (decl))
6920 tree d = decl, *pd;
6921 if (TREE_CODE (d) == ARRAY_REF)
6923 while (TREE_CODE (d) == ARRAY_REF)
6924 d = TREE_OPERAND (d, 0);
6925 if (TREE_CODE (d) == COMPONENT_REF
6926 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6927 decl = d;
6929 pd = &OMP_CLAUSE_DECL (c);
6930 if (d == decl
6931 && TREE_CODE (decl) == INDIRECT_REF
6932 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6933 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6934 == REFERENCE_TYPE))
6936 pd = &TREE_OPERAND (decl, 0);
6937 decl = TREE_OPERAND (decl, 0);
6939 if (TREE_CODE (decl) == COMPONENT_REF)
6941 while (TREE_CODE (decl) == COMPONENT_REF)
6942 decl = TREE_OPERAND (decl, 0);
6944 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6945 == GS_ERROR)
6947 remove = true;
6948 break;
6950 if (DECL_P (decl))
6952 if (error_operand_p (decl))
6954 remove = true;
6955 break;
6958 if (TYPE_SIZE_UNIT (TREE_TYPE (decl)) == NULL
6959 || (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
6960 != INTEGER_CST))
6962 error_at (OMP_CLAUSE_LOCATION (c),
6963 "mapping field %qE of variable length "
6964 "structure", OMP_CLAUSE_DECL (c));
6965 remove = true;
6966 break;
6969 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6971 /* Error recovery. */
6972 if (prev_list_p == NULL)
6974 remove = true;
6975 break;
6977 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6979 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6980 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6982 remove = true;
6983 break;
6988 tree offset;
6989 HOST_WIDE_INT bitsize, bitpos;
6990 machine_mode mode;
6991 int unsignedp, reversep, volatilep = 0;
6992 tree base = OMP_CLAUSE_DECL (c);
6993 while (TREE_CODE (base) == ARRAY_REF)
6994 base = TREE_OPERAND (base, 0);
6995 if (TREE_CODE (base) == INDIRECT_REF)
6996 base = TREE_OPERAND (base, 0);
6997 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6998 &mode, &unsignedp, &reversep,
6999 &volatilep, false);
7000 gcc_assert (base == decl
7001 && (offset == NULL_TREE
7002 || TREE_CODE (offset) == INTEGER_CST));
7004 splay_tree_node n
7005 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7006 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7007 == GOMP_MAP_ALWAYS_POINTER);
7008 if (n == NULL || (n->value & GOVD_MAP) == 0)
7010 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7011 OMP_CLAUSE_MAP);
7012 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7013 OMP_CLAUSE_DECL (l) = decl;
7014 OMP_CLAUSE_SIZE (l) = size_int (1);
7015 if (struct_map_to_clause == NULL)
7016 struct_map_to_clause = new hash_map<tree, tree>;
7017 struct_map_to_clause->put (decl, l);
7018 if (ptr)
7020 enum gomp_map_kind mkind
7021 = code == OMP_TARGET_EXIT_DATA
7022 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7023 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7024 OMP_CLAUSE_MAP);
7025 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7026 OMP_CLAUSE_DECL (c2)
7027 = unshare_expr (OMP_CLAUSE_DECL (c));
7028 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7029 OMP_CLAUSE_SIZE (c2)
7030 = TYPE_SIZE_UNIT (ptr_type_node);
7031 OMP_CLAUSE_CHAIN (l) = c2;
7032 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7034 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7035 tree c3
7036 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7037 OMP_CLAUSE_MAP);
7038 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7039 OMP_CLAUSE_DECL (c3)
7040 = unshare_expr (OMP_CLAUSE_DECL (c4));
7041 OMP_CLAUSE_SIZE (c3)
7042 = TYPE_SIZE_UNIT (ptr_type_node);
7043 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7044 OMP_CLAUSE_CHAIN (c2) = c3;
7046 *prev_list_p = l;
7047 prev_list_p = NULL;
7049 else
7051 OMP_CLAUSE_CHAIN (l) = c;
7052 *list_p = l;
7053 list_p = &OMP_CLAUSE_CHAIN (l);
7055 flags = GOVD_MAP | GOVD_EXPLICIT;
7056 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7057 flags |= GOVD_SEEN;
7058 goto do_add_decl;
7060 else
7062 tree *osc = struct_map_to_clause->get (decl);
7063 tree *sc = NULL, *scp = NULL;
7064 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7065 n->value |= GOVD_SEEN;
7066 offset_int o1, o2;
7067 if (offset)
7068 o1 = wi::to_offset (offset);
7069 else
7070 o1 = 0;
7071 if (bitpos)
7072 o1 = o1 + bitpos / BITS_PER_UNIT;
7073 for (sc = &OMP_CLAUSE_CHAIN (*osc);
7074 *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7075 if (ptr && sc == prev_list_p)
7076 break;
7077 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7078 != COMPONENT_REF
7079 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7080 != INDIRECT_REF)
7081 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7082 != ARRAY_REF))
7083 break;
7084 else
7086 tree offset2;
7087 HOST_WIDE_INT bitsize2, bitpos2;
7088 base = OMP_CLAUSE_DECL (*sc);
7089 if (TREE_CODE (base) == ARRAY_REF)
7091 while (TREE_CODE (base) == ARRAY_REF)
7092 base = TREE_OPERAND (base, 0);
7093 if (TREE_CODE (base) != COMPONENT_REF
7094 || (TREE_CODE (TREE_TYPE (base))
7095 != ARRAY_TYPE))
7096 break;
7098 else if (TREE_CODE (base) == INDIRECT_REF
7099 && (TREE_CODE (TREE_OPERAND (base, 0))
7100 == COMPONENT_REF)
7101 && (TREE_CODE (TREE_TYPE
7102 (TREE_OPERAND (base, 0)))
7103 == REFERENCE_TYPE))
7104 base = TREE_OPERAND (base, 0);
7105 base = get_inner_reference (base, &bitsize2,
7106 &bitpos2, &offset2,
7107 &mode, &unsignedp,
7108 &reversep, &volatilep,
7109 false);
7110 if (base != decl)
7111 break;
7112 if (scp)
7113 continue;
7114 gcc_assert (offset == NULL_TREE
7115 || TREE_CODE (offset) == INTEGER_CST);
7116 tree d1 = OMP_CLAUSE_DECL (*sc);
7117 tree d2 = OMP_CLAUSE_DECL (c);
7118 while (TREE_CODE (d1) == ARRAY_REF)
7119 d1 = TREE_OPERAND (d1, 0);
7120 while (TREE_CODE (d2) == ARRAY_REF)
7121 d2 = TREE_OPERAND (d2, 0);
7122 if (TREE_CODE (d1) == INDIRECT_REF)
7123 d1 = TREE_OPERAND (d1, 0);
7124 if (TREE_CODE (d2) == INDIRECT_REF)
7125 d2 = TREE_OPERAND (d2, 0);
7126 while (TREE_CODE (d1) == COMPONENT_REF)
7127 if (TREE_CODE (d2) == COMPONENT_REF
7128 && TREE_OPERAND (d1, 1)
7129 == TREE_OPERAND (d2, 1))
7131 d1 = TREE_OPERAND (d1, 0);
7132 d2 = TREE_OPERAND (d2, 0);
7134 else
7135 break;
7136 if (d1 == d2)
7138 error_at (OMP_CLAUSE_LOCATION (c),
7139 "%qE appears more than once in map "
7140 "clauses", OMP_CLAUSE_DECL (c));
7141 remove = true;
7142 break;
7144 if (offset2)
7145 o2 = wi::to_offset (offset2);
7146 else
7147 o2 = 0;
7148 if (bitpos2)
7149 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7150 if (wi::ltu_p (o1, o2)
7151 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
7153 if (ptr)
7154 scp = sc;
7155 else
7156 break;
7159 if (remove)
7160 break;
7161 OMP_CLAUSE_SIZE (*osc)
7162 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7163 size_one_node);
7164 if (ptr)
7166 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7167 OMP_CLAUSE_MAP);
7168 tree cl = NULL_TREE;
7169 enum gomp_map_kind mkind
7170 = code == OMP_TARGET_EXIT_DATA
7171 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7172 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7173 OMP_CLAUSE_DECL (c2)
7174 = unshare_expr (OMP_CLAUSE_DECL (c));
7175 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7176 OMP_CLAUSE_SIZE (c2)
7177 = TYPE_SIZE_UNIT (ptr_type_node);
7178 cl = scp ? *prev_list_p : c2;
7179 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7181 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7182 tree c3
7183 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7184 OMP_CLAUSE_MAP);
7185 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7186 OMP_CLAUSE_DECL (c3)
7187 = unshare_expr (OMP_CLAUSE_DECL (c4));
7188 OMP_CLAUSE_SIZE (c3)
7189 = TYPE_SIZE_UNIT (ptr_type_node);
7190 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7191 if (!scp)
7192 OMP_CLAUSE_CHAIN (c2) = c3;
7193 else
7194 cl = c3;
7196 if (scp)
7197 *scp = c2;
7198 if (sc == prev_list_p)
7200 *sc = cl;
7201 prev_list_p = NULL;
7203 else
7205 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7206 list_p = prev_list_p;
7207 prev_list_p = NULL;
7208 OMP_CLAUSE_CHAIN (c) = *sc;
7209 *sc = cl;
7210 continue;
7213 else if (*sc != c)
7215 *list_p = OMP_CLAUSE_CHAIN (c);
7216 OMP_CLAUSE_CHAIN (c) = *sc;
7217 *sc = c;
7218 continue;
7222 if (!remove
7223 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7224 && OMP_CLAUSE_CHAIN (c)
7225 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7226 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7227 == GOMP_MAP_ALWAYS_POINTER))
7228 prev_list_p = list_p;
7229 break;
7231 flags = GOVD_MAP | GOVD_EXPLICIT;
7232 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7233 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7234 flags |= GOVD_MAP_ALWAYS_TO;
7235 goto do_add;
7237 case OMP_CLAUSE_DEPEND:
7238 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7239 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7241 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7242 omp-low.c. */
7243 break;
7245 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7247 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7248 NULL, is_gimple_val, fb_rvalue);
7249 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7251 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7253 remove = true;
7254 break;
7256 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7257 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7258 is_gimple_val, fb_rvalue) == GS_ERROR)
7260 remove = true;
7261 break;
7263 break;
7265 case OMP_CLAUSE_TO:
7266 case OMP_CLAUSE_FROM:
7267 case OMP_CLAUSE__CACHE_:
7268 decl = OMP_CLAUSE_DECL (c);
7269 if (error_operand_p (decl))
7271 remove = true;
7272 break;
7274 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7275 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7276 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7277 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7278 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7280 remove = true;
7281 break;
7283 if (!DECL_P (decl))
7285 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7286 NULL, is_gimple_lvalue, fb_lvalue)
7287 == GS_ERROR)
7289 remove = true;
7290 break;
7292 break;
7294 goto do_notice;
7296 case OMP_CLAUSE_USE_DEVICE_PTR:
7297 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7298 goto do_add;
7299 case OMP_CLAUSE_IS_DEVICE_PTR:
7300 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7301 goto do_add;
7303 do_add:
7304 decl = OMP_CLAUSE_DECL (c);
7305 do_add_decl:
7306 if (error_operand_p (decl))
7308 remove = true;
7309 break;
7311 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7313 tree t = omp_member_access_dummy_var (decl);
7314 if (t)
7316 tree v = DECL_VALUE_EXPR (decl);
7317 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7318 if (outer_ctx)
7319 omp_notice_variable (outer_ctx, t, true);
7322 if (code == OACC_DATA
7323 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7324 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7325 flags |= GOVD_MAP_0LEN_ARRAY;
7326 omp_add_variable (ctx, decl, flags);
7327 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7328 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7330 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7331 GOVD_LOCAL | GOVD_SEEN);
7332 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7333 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7334 find_decl_expr,
7335 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7336 NULL) == NULL_TREE)
7337 omp_add_variable (ctx,
7338 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7339 GOVD_LOCAL | GOVD_SEEN);
7340 gimplify_omp_ctxp = ctx;
7341 push_gimplify_context ();
7343 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7346 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7347 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7348 pop_gimplify_context
7349 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7350 push_gimplify_context ();
7351 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7352 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7353 pop_gimplify_context
7354 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7355 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7356 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7358 gimplify_omp_ctxp = outer_ctx;
7360 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7361 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7363 gimplify_omp_ctxp = ctx;
7364 push_gimplify_context ();
7365 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7367 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7368 NULL, NULL);
7369 TREE_SIDE_EFFECTS (bind) = 1;
7370 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7371 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7373 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7374 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7375 pop_gimplify_context
7376 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7377 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7379 gimplify_omp_ctxp = outer_ctx;
7381 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7382 && OMP_CLAUSE_LINEAR_STMT (c))
7384 gimplify_omp_ctxp = ctx;
7385 push_gimplify_context ();
7386 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7388 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7389 NULL, NULL);
7390 TREE_SIDE_EFFECTS (bind) = 1;
7391 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7392 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7394 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7395 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7396 pop_gimplify_context
7397 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7398 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7400 gimplify_omp_ctxp = outer_ctx;
7402 if (notice_outer)
7403 goto do_notice;
7404 break;
7406 case OMP_CLAUSE_COPYIN:
7407 case OMP_CLAUSE_COPYPRIVATE:
7408 decl = OMP_CLAUSE_DECL (c);
7409 if (error_operand_p (decl))
7411 remove = true;
7412 break;
7414 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7415 && !remove
7416 && !omp_check_private (ctx, decl, true))
7418 remove = true;
7419 if (is_global_var (decl))
7421 if (DECL_THREAD_LOCAL_P (decl))
7422 remove = false;
7423 else if (DECL_HAS_VALUE_EXPR_P (decl))
7425 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7427 if (value
7428 && DECL_P (value)
7429 && DECL_THREAD_LOCAL_P (value))
7430 remove = false;
7433 if (remove)
7434 error_at (OMP_CLAUSE_LOCATION (c),
7435 "copyprivate variable %qE is not threadprivate"
7436 " or private in outer context", DECL_NAME (decl));
7438 do_notice:
7439 if (outer_ctx)
7440 omp_notice_variable (outer_ctx, decl, true);
7441 if (check_non_private
7442 && region_type == ORT_WORKSHARE
7443 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7444 || decl == OMP_CLAUSE_DECL (c)
7445 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7446 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7447 == ADDR_EXPR
7448 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7449 == POINTER_PLUS_EXPR
7450 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7451 (OMP_CLAUSE_DECL (c), 0), 0))
7452 == ADDR_EXPR)))))
7453 && omp_check_private (ctx, decl, false))
7455 error ("%s variable %qE is private in outer context",
7456 check_non_private, DECL_NAME (decl));
7457 remove = true;
7459 break;
7461 case OMP_CLAUSE_IF:
7462 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7463 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7465 const char *p[2];
7466 for (int i = 0; i < 2; i++)
7467 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7469 case OMP_PARALLEL: p[i] = "parallel"; break;
7470 case OMP_TASK: p[i] = "task"; break;
7471 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7472 case OMP_TARGET_DATA: p[i] = "target data"; break;
7473 case OMP_TARGET: p[i] = "target"; break;
7474 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7475 case OMP_TARGET_ENTER_DATA:
7476 p[i] = "target enter data"; break;
7477 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7478 default: gcc_unreachable ();
7480 error_at (OMP_CLAUSE_LOCATION (c),
7481 "expected %qs %<if%> clause modifier rather than %qs",
7482 p[0], p[1]);
7483 remove = true;
7485 /* Fall through. */
7487 case OMP_CLAUSE_FINAL:
7488 OMP_CLAUSE_OPERAND (c, 0)
7489 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7490 /* Fall through. */
7492 case OMP_CLAUSE_SCHEDULE:
7493 case OMP_CLAUSE_NUM_THREADS:
7494 case OMP_CLAUSE_NUM_TEAMS:
7495 case OMP_CLAUSE_THREAD_LIMIT:
7496 case OMP_CLAUSE_DIST_SCHEDULE:
7497 case OMP_CLAUSE_DEVICE:
7498 case OMP_CLAUSE_PRIORITY:
7499 case OMP_CLAUSE_GRAINSIZE:
7500 case OMP_CLAUSE_NUM_TASKS:
7501 case OMP_CLAUSE_HINT:
7502 case OMP_CLAUSE__CILK_FOR_COUNT_:
7503 case OMP_CLAUSE_ASYNC:
7504 case OMP_CLAUSE_WAIT:
7505 case OMP_CLAUSE_NUM_GANGS:
7506 case OMP_CLAUSE_NUM_WORKERS:
7507 case OMP_CLAUSE_VECTOR_LENGTH:
7508 case OMP_CLAUSE_WORKER:
7509 case OMP_CLAUSE_VECTOR:
7510 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7511 is_gimple_val, fb_rvalue) == GS_ERROR)
7512 remove = true;
7513 break;
7515 case OMP_CLAUSE_GANG:
7516 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7517 is_gimple_val, fb_rvalue) == GS_ERROR)
7518 remove = true;
7519 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7520 is_gimple_val, fb_rvalue) == GS_ERROR)
7521 remove = true;
7522 break;
7524 case OMP_CLAUSE_TILE:
7525 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7526 list = TREE_CHAIN (list))
7528 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7529 is_gimple_val, fb_rvalue) == GS_ERROR)
7530 remove = true;
7532 break;
7534 case OMP_CLAUSE_DEVICE_RESIDENT:
7535 remove = true;
7536 break;
7538 case OMP_CLAUSE_NOWAIT:
7539 case OMP_CLAUSE_ORDERED:
7540 case OMP_CLAUSE_UNTIED:
7541 case OMP_CLAUSE_COLLAPSE:
7542 case OMP_CLAUSE_AUTO:
7543 case OMP_CLAUSE_SEQ:
7544 case OMP_CLAUSE_INDEPENDENT:
7545 case OMP_CLAUSE_MERGEABLE:
7546 case OMP_CLAUSE_PROC_BIND:
7547 case OMP_CLAUSE_SAFELEN:
7548 case OMP_CLAUSE_SIMDLEN:
7549 case OMP_CLAUSE_NOGROUP:
7550 case OMP_CLAUSE_THREADS:
7551 case OMP_CLAUSE_SIMD:
7552 break;
7554 case OMP_CLAUSE_DEFAULTMAP:
7555 ctx->target_map_scalars_firstprivate = false;
7556 break;
7558 case OMP_CLAUSE_ALIGNED:
7559 decl = OMP_CLAUSE_DECL (c);
7560 if (error_operand_p (decl))
7562 remove = true;
7563 break;
7565 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7566 is_gimple_val, fb_rvalue) == GS_ERROR)
7568 remove = true;
7569 break;
7571 if (!is_global_var (decl)
7572 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7573 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7574 break;
7576 case OMP_CLAUSE_DEFAULT:
7577 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7578 break;
7580 default:
7581 gcc_unreachable ();
7584 if (code == OACC_DATA
7585 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7586 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7587 remove = true;
7588 if (remove)
7589 *list_p = OMP_CLAUSE_CHAIN (c);
7590 else
7591 list_p = &OMP_CLAUSE_CHAIN (c);
7594 gimplify_omp_ctxp = ctx;
7595 if (struct_map_to_clause)
7596 delete struct_map_to_clause;
7599 /* Return true if DECL is a candidate for shared to firstprivate
7600 optimization. We only consider non-addressable scalars, not
7601 too big, and not references. */
7603 static bool
7604 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7606 if (TREE_ADDRESSABLE (decl))
7607 return false;
7608 tree type = TREE_TYPE (decl);
7609 if (!is_gimple_reg_type (type)
7610 || TREE_CODE (type) == REFERENCE_TYPE
7611 || TREE_ADDRESSABLE (type))
7612 return false;
7613 /* Don't optimize too large decls, as each thread/task will have
7614 its own. */
7615 HOST_WIDE_INT len = int_size_in_bytes (type);
7616 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7617 return false;
7618 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7619 return false;
7620 return true;
7623 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7624 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
7625 GOVD_WRITTEN in outer contexts. */
7627 static void
7628 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7630 for (; ctx; ctx = ctx->outer_context)
7632 splay_tree_node n = splay_tree_lookup (ctx->variables,
7633 (splay_tree_key) decl);
7634 if (n == NULL)
7635 continue;
7636 else if (n->value & GOVD_SHARED)
7638 n->value |= GOVD_WRITTEN;
7639 return;
7641 else if (n->value & GOVD_DATA_SHARE_CLASS)
7642 return;
7646 /* Helper callback for walk_gimple_seq to discover possible stores
7647 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7648 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7649 for those. */
7651 static tree
7652 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7654 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7656 *walk_subtrees = 0;
7657 if (!wi->is_lhs)
7658 return NULL_TREE;
7660 tree op = *tp;
7663 if (handled_component_p (op))
7664 op = TREE_OPERAND (op, 0);
7665 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7666 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7667 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7668 else
7669 break;
7671 while (1);
7672 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7673 return NULL_TREE;
7675 omp_mark_stores (gimplify_omp_ctxp, op);
7676 return NULL_TREE;
7679 /* Helper callback for walk_gimple_seq to discover possible stores
7680 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7681 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7682 for those. */
7684 static tree
7685 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7686 bool *handled_ops_p,
7687 struct walk_stmt_info *wi)
7689 gimple *stmt = gsi_stmt (*gsi_p);
7690 switch (gimple_code (stmt))
7692 /* Don't recurse on OpenMP constructs for which
7693 gimplify_adjust_omp_clauses already handled the bodies,
7694 except handle gimple_omp_for_pre_body. */
7695 case GIMPLE_OMP_FOR:
7696 *handled_ops_p = true;
7697 if (gimple_omp_for_pre_body (stmt))
7698 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7699 omp_find_stores_stmt, omp_find_stores_op, wi);
7700 break;
7701 case GIMPLE_OMP_PARALLEL:
7702 case GIMPLE_OMP_TASK:
7703 case GIMPLE_OMP_SECTIONS:
7704 case GIMPLE_OMP_SINGLE:
7705 case GIMPLE_OMP_TARGET:
7706 case GIMPLE_OMP_TEAMS:
7707 case GIMPLE_OMP_CRITICAL:
7708 *handled_ops_p = true;
7709 break;
7710 default:
7711 break;
7713 return NULL_TREE;
/* Bundle of state passed through the void *DATA argument of
   gimplify_adjust_omp_clauses_1, a splay_tree_foreach callback.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause list; implicit clauses are prepended here.  */
  tree *list_p;
  /* Sequence to emit statements produced during clause finalization.  */
  gimple_seq *pre_p;
};
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses were emitted by the user; local decls need none.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Unused within the region: no implicit clause needed.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* data sharing class into a clause code.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* Globals are only made explicitly shared if some enclosing
	     context privatizes or maps them; otherwise they are
	     implicitly shared anyway and no clause is needed.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  /* A written shared (or lastprivate) candidate decl must be marked
     written in the enclosing contexts too.  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Emit an alloc of a zero-length array section plus a
	 firstprivate pointer for the base.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the enclosing context.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind = (flags & GOVD_MAP_TO_ONLY
		  ? GOMP_MAP_TO
		  : GOMP_MAP_TOFROM);
      if (flags & GOVD_MAP_FORCE)
	kind |= GOMP_MAP_FLAG_FORCE;
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the pointed-to storage and add a
	     pointer clause for the base.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference: map the referenced object and firstprivatize
	     the reference itself.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Both first- and lastprivate: add the lastprivate companion.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the frontend finalize the clause in the enclosing context.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  gimplify_omp_ctxp = ctx;
  return 0;
}
/* Post-gimplification fixup of the clause list *LIST_P for the region
   described by the current gimplify_omp_ctx: prune clauses for unused
   decls, adjust map kinds/sizes, then add implicit data sharing
   clauses and pop the context.  BODY, if non-NULL, is scanned for
   stores to shared-to-firstprivate candidates.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  if (body)
    {
      /* Only bother scanning for stores when some enclosing region can
	 actually share variables.  */
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop clauses for decls never used in the region.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    /* NOTE(review): GOVD_WRITTEN is a multi-bit
			       flag, so this != 1 comparison is nearly
			       always true; != 0 looks intended — TODO
			       confirm against upstream.  */
			    && (n->value & GOVD_WRITTEN) != 1)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (omp_no_lastprivate (ctx))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		remove = true;
	      else
		OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
	    }
	  else if (code == OMP_DISTRIBUTE
		   && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to present_or_copy",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      /* Non-decl maps: only firstprivate pointers to struct
		 members inside target regions need a usage check.  */
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: map the underlying storage.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  /* Chain a pointer clause for the base decl.  */
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: transfer the underlying storage.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Error if the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			  "reduction on %qE", DECL_NAME (decl));
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
		  /* Mark every clause omp_finish_clause appended.  */
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_DEVICE_RESIDENT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
8304 /* Gimplify OACC_CACHE. */
8306 static void
8307 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8309 tree expr = *expr_p;
8311 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
8312 OACC_CACHE);
8313 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8314 OACC_CACHE);
8316 /* TODO: Do something sensible with this information. */
8318 *expr_p = NULL_TREE;
8321 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
8322 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
8323 kind. The entry kind will replace the one in CLAUSE, while the exit
8324 kind will be used in a new omp_clause and returned to the caller. */
8326 static tree
8327 gimplify_oacc_declare_1 (tree clause)
8329 HOST_WIDE_INT kind, new_op;
8330 bool ret = false;
8331 tree c = NULL;
8333 kind = OMP_CLAUSE_MAP_KIND (clause);
8335 switch (kind)
8337 case GOMP_MAP_ALLOC:
8338 case GOMP_MAP_FORCE_ALLOC:
8339 case GOMP_MAP_FORCE_TO:
8340 new_op = GOMP_MAP_DELETE;
8341 ret = true;
8342 break;
8344 case GOMP_MAP_FORCE_FROM:
8345 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8346 new_op = GOMP_MAP_FORCE_FROM;
8347 ret = true;
8348 break;
8350 case GOMP_MAP_FORCE_TOFROM:
8351 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8352 new_op = GOMP_MAP_FORCE_FROM;
8353 ret = true;
8354 break;
8356 case GOMP_MAP_FROM:
8357 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8358 new_op = GOMP_MAP_FROM;
8359 ret = true;
8360 break;
8362 case GOMP_MAP_TOFROM:
8363 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8364 new_op = GOMP_MAP_FROM;
8365 ret = true;
8366 break;
8368 case GOMP_MAP_DEVICE_RESIDENT:
8369 case GOMP_MAP_FORCE_DEVICEPTR:
8370 case GOMP_MAP_FORCE_PRESENT:
8371 case GOMP_MAP_LINK:
8372 case GOMP_MAP_POINTER:
8373 case GOMP_MAP_TO:
8374 break;
8376 default:
8377 gcc_unreachable ();
8378 break;
8381 if (ret)
8383 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8384 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8385 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8388 return c;
8391 /* Gimplify OACC_DECLARE. */
8393 static void
8394 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8396 tree expr = *expr_p;
8397 gomp_target *stmt;
8398 tree clauses, t;
8400 clauses = OACC_DECLARE_CLAUSES (expr);
8402 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8404 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8406 tree decl = OMP_CLAUSE_DECL (t);
8408 if (TREE_CODE (decl) == MEM_REF)
8409 continue;
8411 if (TREE_CODE (decl) == VAR_DECL
8412 && !is_global_var (decl)
8413 && DECL_CONTEXT (decl) == current_function_decl)
8415 tree c = gimplify_oacc_declare_1 (t);
8416 if (c)
8418 if (oacc_declare_returns == NULL)
8419 oacc_declare_returns = new hash_map<tree, tree>;
8421 oacc_declare_returns->put (decl, c);
8425 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8428 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8429 clauses);
8431 gimplify_seq_add_stmt (pre_p, stmt);
8433 *expr_p = NULL_TREE;
8436 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
8437 gimplification of the body, as well as scanning the body for used
8438 variables. We need to do this scan now, because variable-sized
8439 decls will be decomposed during gimplification. */
8441 static void
8442 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8444 tree expr = *expr_p;
8445 gimple *g;
8446 gimple_seq body = NULL;
8448 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8449 OMP_PARALLEL_COMBINED (expr)
8450 ? ORT_COMBINED_PARALLEL
8451 : ORT_PARALLEL, OMP_PARALLEL);
8453 push_gimplify_context ();
8455 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8456 if (gimple_code (g) == GIMPLE_BIND)
8457 pop_gimplify_context (g);
8458 else
8459 pop_gimplify_context (NULL);
8461 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
8462 OMP_PARALLEL);
8464 g = gimple_build_omp_parallel (body,
8465 OMP_PARALLEL_CLAUSES (expr),
8466 NULL_TREE, NULL_TREE);
8467 if (OMP_PARALLEL_COMBINED (expr))
8468 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8469 gimplify_seq_add_stmt (pre_p, g);
8470 *expr_p = NULL_TREE;
8473 /* Gimplify the contents of an OMP_TASK statement. This involves
8474 gimplification of the body, as well as scanning the body for used
8475 variables. We need to do this scan now, because variable-sized
8476 decls will be decomposed during gimplification. */
8478 static void
8479 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8481 tree expr = *expr_p;
8482 gimple *g;
8483 gimple_seq body = NULL;
8485 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8486 find_omp_clause (OMP_TASK_CLAUSES (expr),
8487 OMP_CLAUSE_UNTIED)
8488 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8490 push_gimplify_context ();
8492 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8493 if (gimple_code (g) == GIMPLE_BIND)
8494 pop_gimplify_context (g);
8495 else
8496 pop_gimplify_context (NULL);
8498 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8499 OMP_TASK);
8501 g = gimple_build_omp_task (body,
8502 OMP_TASK_CLAUSES (expr),
8503 NULL_TREE, NULL_TREE,
8504 NULL_TREE, NULL_TREE, NULL_TREE);
8505 gimplify_seq_add_stmt (pre_p, g);
8506 *expr_p = NULL_TREE;
8509 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
8510 with non-NULL OMP_FOR_INIT. */
8512 static tree
8513 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8515 *walk_subtrees = 0;
8516 switch (TREE_CODE (*tp))
8518 case OMP_FOR:
8519 *walk_subtrees = 1;
8520 /* FALLTHRU */
8521 case OMP_SIMD:
8522 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8523 return *tp;
8524 break;
8525 case BIND_EXPR:
8526 case STATEMENT_LIST:
8527 case OMP_PARALLEL:
8528 *walk_subtrees = 1;
8529 break;
8530 default:
8531 break;
8533 return NULL_TREE;
/* Gimplify the gross structure of an OMP_FOR statement and its variants
   (OMP_SIMD, CILK_FOR, CILK_SIMD, OMP_DISTRIBUTE, OMP_TASKLOOP and
   OACC_LOOP).  *EXPR_P is the looping construct; statements produced as
   side effects are appended to PRE_P.  On success the construct is
   replaced by a GIMPLE_OMP_FOR (for taskloop: an outer taskloop GIMPLE_OMP_FOR
   wrapping a GIMPLE_OMP_TASK wrapping the inner one) added to PRE_P,
   *EXPR_P is cleared and GS_ALL_DONE is returned; GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gomp_for *gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bitmap has_decl_expr = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;

  orig_for_stmt = for_stmt = *expr_p;

  /* Pick the OMP region type used when scanning this construct's
     clauses.  */
  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
    case CILK_FOR:
    case OMP_DISTRIBUTE:
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
	ort = ORT_UNTIED_TASK;
      else
	ort = ORT_TASK;
      break;
    case OMP_SIMD:
    case CILK_SIMD:
      ort = ORT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }

  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	    && OMP_CLAUSE_DECL (c) == decl)
	  {
	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	    break;
	  }
    }

  /* A NULL OMP_FOR_INIT marks the outer statement of a combined
     construct; the actual loop is found inside its body.  */
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    {
      gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
      inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
				  find_combined_omp_for, NULL, NULL);
      if (inner_for_stmt == NULL_TREE)
	{
	  gcc_assert (seen_error ());
	  *expr_p = NULL_TREE;
	  return GS_ERROR;
	}
    }

  /* Taskloop clauses are scanned later, after start/end/step have been
     pulled out of the loop (see below).  */
  if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
			       TREE_CODE (for_stmt));

  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;

  /* Handle OMP_FOR_INIT.  Record in HAS_DECL_EXPR which iteration
     variables were declared in the pre-body (i.e. in the loop init).  */
  for_pre_body = NULL;
  if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
    {
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
	     == VAR_DECL)
	{
	  t = OMP_FOR_PRE_BODY (for_stmt);
	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	}
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
	{
	  tree_stmt_iterator si;
	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
	       tsi_next (&si))
	    {
	      t = tsi_stmt (si);
	      if (TREE_CODE (t) == DECL_EXPR
		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
	    }
	}
    }
  if (OMP_FOR_PRE_BODY (for_stmt))
    {
      if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
      else
	{
	  /* Gimplify a taskloop pre-body outside of any OMP context,
	     using a temporary ORT_NONE context.  */
	  struct gimplify_omp_ctx ctx;
	  memset (&ctx, 0, sizeof (ctx));
	  ctx.region_type = ORT_NONE;
	  gimplify_omp_ctxp = &ctx;
	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
	  gimplify_omp_ctxp = NULL;
	}
    }
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  /* From here on operate on the innermost loop of a combined
     construct.  */
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    for_stmt = inner_for_stmt;

  /* For taskloop, need to gimplify the start, end and step before the
     taskloop, outside of the taskloop omp context.  Each non-constant
     value is replaced by a temporary which is made firstprivate on the
     construct.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	{
	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
	    {
	      TREE_OPERAND (t, 1)
		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
					   pre_p, NULL, false);
	      tree c = build_omp_clause (input_location,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
	    }

	  /* Handle OMP_FOR_COND.  */
	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
	    {
	      TREE_OPERAND (t, 1)
		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
					   gimple_seq_empty_p (for_pre_body)
					   ? pre_p : &for_pre_body, NULL,
					   false);
	      tree c = build_omp_clause (input_location,
					 OMP_CLAUSE_FIRSTPRIVATE);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
	    }

	  /* Handle OMP_FOR_INCR.  */
	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	  if (TREE_CODE (t) == MODIFY_EXPR)
	    {
	      decl = TREE_OPERAND (t, 0);
	      t = TREE_OPERAND (t, 1);
	      tree *tp = &TREE_OPERAND (t, 1);
	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
		tp = &TREE_OPERAND (t, 0);

	      if (!is_gimple_constant (*tp))
		{
		  gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
				    ? pre_p : &for_pre_body;
		  *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
		  tree c = build_omp_clause (input_location,
					     OMP_CLAUSE_FIRSTPRIVATE);
		  OMP_CLAUSE_DECL (c) = *tp;
		  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
		  OMP_FOR_CLAUSES (orig_for_stmt) = c;
		}
	    }
	}

      gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
				 OMP_TASKLOOP);
    }

  if (orig_for_stmt != for_stmt)
    gimplify_omp_ctxp->combined_loop = true;

  for_body = NULL;
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));

  /* ordered(n) with an expression means a doacross loop; remember the
     iteration variables for it.  */
  tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
  bool is_doacross = false;
  if (c && OMP_CLAUSE_ORDERED_EXPR (c))
    {
      is_doacross = true;
      gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
						(OMP_FOR_INIT (for_stmt))
					       * 2);
    }
  int collapse = 1;
  c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
  if (c)
    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));

  /* Process each collapsed loop dimension: privatize the iteration
     variable and gimplify the init, cond and incr expressions.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
	{
	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
	    gimplify_omp_ctxp->loop_iter_var.quick_push
	      (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
	  else
	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
	}

      /* Make sure the iteration variable is private.  Note: this C
	 shadows the clause-lookup C above for the rest of the loop
	 body.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
	/* Do this only on innermost construct for combined ones.  */;
      else if (ort == ORT_SIMD)
	{
	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
						 (splay_tree_key) decl);
	  omp_is_private (gimplify_omp_ctxp, decl,
			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
			       != 1));
	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      /* Non-collapsed simd: the IV becomes an implicit linear
		 clause; decide whether copy-out is needed by looking at
		 the enclosing contexts.  */
	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
	      if ((has_decl_expr
		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  || omp_no_lastprivate (gimplify_omp_ctxp))
		{
		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
		}
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
			}
		      else
			{
			  struct gimplify_omp_ctx *octx = outer->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_PARALLEL
			      && octx->outer_context
			      && (octx->outer_context->region_type
				  == ORT_WORKSHARE)
			      && octx->outer_context->combined_loop)
			    {
			      octx = octx->outer_context;
			      n = splay_tree_lookup (octx->variables,
						     (splay_tree_key)decl);
			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
				{
				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
				}
			    }
			}
		    }
		}

	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
	      /* When the IV's value is live after the loop, propagate a
		 lastprivate (and possibly shared) binding outward through
		 the combined construct's contexts.  */
	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      if (outer->outer_context
			  && (outer->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			outer = outer->outer_context;
		      else if (omp_check_private (outer, decl, false))
			outer = NULL;
		    }
		  else if (((outer->region_type & ORT_TASK) != 0)
			   && outer->combined_loop
			   && !omp_check_private (gimplify_omp_ctxp,
						  decl, false))
		    ;
		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
		    {
		      omp_notice_variable (outer, decl, true);
		      outer = NULL;
		    }
		  if (outer)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  omp_add_variable (outer, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  if (outer->region_type == ORT_COMBINED_PARALLEL
			      && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_WORKSHARE)
			      && outer->outer_context->combined_loop)
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (omp_check_private (outer, decl, false))
				outer = NULL;
			      else if (n == NULL
				       || ((n->value & GOVD_DATA_SHARE_CLASS)
					   == 0))
				omp_add_variable (outer, decl,
						  GOVD_LASTPRIVATE
						  | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_COMBINED_TEAMS))
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (n == NULL
				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
				omp_add_variable (outer, decl,
						  GOVD_SHARED | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context)
			    omp_notice_variable (outer->outer_context, decl,
						 true);
			}
		    }
		}
	    }
	  else
	    {
	      /* Collapsed simd: the IV is lastprivate unless it was
		 declared in the loop init or lastprivate is disabled.  */
	      bool lastprivate
		= (!has_decl_expr
		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
		  && !omp_no_lastprivate (gimplify_omp_ctxp);
	      struct gimplify_omp_ctx *outer
		= gimplify_omp_ctxp->outer_context;
	      if (outer && lastprivate)
		{
		  if (outer->region_type == ORT_WORKSHARE
		      && outer->combined_loop)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
			{
			  lastprivate = false;
			  outer = NULL;
			}
		      else if (outer->outer_context
			       && (outer->outer_context->region_type
				   == ORT_COMBINED_PARALLEL))
			outer = outer->outer_context;
		      else if (omp_check_private (outer, decl, false))
			outer = NULL;
		    }
		  else if (((outer->region_type & ORT_TASK) != 0)
			   && outer->combined_loop
			   && !omp_check_private (gimplify_omp_ctxp,
						  decl, false))
		    ;
		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
		    {
		      omp_notice_variable (outer, decl, true);
		      outer = NULL;
		    }
		  if (outer)
		    {
		      n = splay_tree_lookup (outer->variables,
					     (splay_tree_key)decl);
		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  omp_add_variable (outer, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  if (outer->region_type == ORT_COMBINED_PARALLEL
			      && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_WORKSHARE)
			      && outer->outer_context->combined_loop)
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (omp_check_private (outer, decl, false))
				outer = NULL;
			      else if (n == NULL
				       || ((n->value & GOVD_DATA_SHARE_CLASS)
					   == 0))
				omp_add_variable (outer, decl,
						  GOVD_LASTPRIVATE
						  | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context
			      && (outer->outer_context->region_type
				  == ORT_COMBINED_TEAMS))
			    {
			      outer = outer->outer_context;
			      n = splay_tree_lookup (outer->variables,
						     (splay_tree_key)decl);
			      if (n == NULL
				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
				omp_add_variable (outer, decl,
						  GOVD_SHARED | GOVD_SEEN);
			      else
				outer = NULL;
			    }
			  if (outer && outer->outer_context)
			    omp_notice_variable (outer->outer_context, decl,
						 true);
			}
		    }
		}

	      c = build_omp_clause (input_location,
				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
						: OMP_CLAUSE_PRIVATE);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c;
	      omp_add_variable (gimplify_omp_ctxp, decl,
				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
				| GOVD_EXPLICIT | GOVD_SEEN);
	      c = NULL_TREE;
	    }
	}
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  Similarly for any iteration vars
	 in simd with collapse > 1 where the iterator vars must be
	 lastprivate.  */
      if (orig_for_stmt != for_stmt)
	var = decl;
      else if (!is_gimple_reg (decl)
	       || (ort == ORT_SIMD
		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  if (ort == ORT_SIMD
	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
	    {
	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
	      OMP_CLAUSE_DECL (c2) = var;
	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
	      OMP_FOR_CLAUSES (for_stmt) = c2;
	      omp_add_variable (gimplify_omp_ctxp, var,
				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
	      if (c == NULL_TREE)
		{
		  c = c2;
		  c2 = NULL_TREE;
		}
	    }
	  else
	    omp_add_variable (gimplify_omp_ctxp, var,
			      GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue, false);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue, false);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  Canonicalize the increment to
	 VAR = VAR +/- STEP form and record the step on any linear
	 clause built above.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  {
	    tree decl = TREE_OPERAND (t, 0);
	    /* c_omp_for_incr_canonicalize_ptr() should have been
	       called to massage things appropriately.  */
	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

	    if (orig_for_stmt != for_stmt)
	      break;
	    t = build_int_cst (TREE_TYPE (decl), 1);
	    if (c)
	      OMP_CLAUSE_LINEAR_STEP (c) = t;
	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	    break;
	  }

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* c_omp_for_incr_canonicalize_ptr() should have been
	     called to massage things appropriately.  */
	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
	  if (orig_for_stmt != for_stmt)
	    break;
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  if (c)
	    OMP_CLAUSE_LINEAR_STEP (c) = t;
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  /* Normalize STEP + VAR into VAR + STEP.  */
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue, false);
	  ret = MIN (ret, tret);
	  if (c)
	    {
	      tree step = TREE_OPERAND (t, 1);
	      tree stept = TREE_TYPE (decl);
	      if (POINTER_TYPE_P (stept))
		stept = sizetype;
	      step = fold_convert (stept, step);
	      if (TREE_CODE (t) == MINUS_EXPR)
		step = fold_build1 (NEGATE_EXPR, stept, step);
	      OMP_CLAUSE_LINEAR_STEP (c) = step;
	      if (step != TREE_OPERAND (t, 1))
		{
		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
					&for_pre_body, NULL,
					is_gimple_val, fb_rvalue, false);
		  ret = MIN (ret, tret);
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}

      if (c2)
	{
	  gcc_assert (c);
	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
	}

      /* When iterating through a surrogate VAR, fill in the gimple
	 sequences of matching lastprivate/linear clauses so DECL gets
	 the final value.  */
      if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
	{
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
		&& OMP_CLAUSE_DECL (c) == decl)
	      {
		if (is_doacross && (collapse == 1 || i >= collapse))
		  t = var;
		else
		  {
		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = TREE_OPERAND (t, 1);
		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
				|| TREE_CODE (t) == MINUS_EXPR
				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
		    gcc_assert (TREE_OPERAND (t, 0) == var);
		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
				is_doacross ? var : decl,
				TREE_OPERAND (t, 1));
		  }
		gimple_seq *seq;
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
		else
		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
		gimplify_assign (decl, t, seq);
	      }
	}
    }

  BITMAP_FREE (has_decl_expr);

  /* Taskloop bodies get their own gimplify context and are forced into
     a BIND_EXPR.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      push_gimplify_context ();
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
	{
	  OMP_FOR_BODY (orig_for_stmt)
	    = build3 (BIND_EXPR, void_type_node, NULL,
		      OMP_FOR_BODY (orig_for_stmt), NULL);
	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
	}
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
					 &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
    }

  /* For combined constructs, replace each inner IV with a fresh private
     temporary and retarget the increment to it.  */
  if (orig_for_stmt != for_stmt)
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	decl = TREE_OPERAND (t, 0);
	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
	  gimplify_omp_ctxp = ctx->outer_context;
	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	gimplify_omp_ctxp = ctx;
	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	TREE_OPERAND (t, 0) = var;
	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
			       &OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_CODE (orig_for_stmt));

  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
    case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
	      && gimplify_omp_ctxp->outer_context
	      && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  /* Copy the per-dimension index/initial/cond/final/incr operands into
     the GIMPLE_OMP_FOR.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      for (; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  /* These clauses are allowed on task, move them there.  */
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_DEFAULT:
	  case OMP_CLAUSE_IF:
	  case OMP_CLAUSE_UNTIED:
	  case OMP_CLAUSE_FINAL:
	  case OMP_CLAUSE_MERGEABLE:
	  case OMP_CLAUSE_PRIORITY:
	    *gtask_clauses_ptr = c;
	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
	      {
		/* We want private on outer for and firstprivate
		   on task.  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		*gforo_clauses_ptr = c;
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    else
	      {
		*gtask_clauses_ptr = c;
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	      }
	    break;
	  /* These clauses go into outer taskloop clauses.  */
	  case OMP_CLAUSE_GRAINSIZE:
	  case OMP_CLAUSE_NUM_TASKS:
	  case OMP_CLAUSE_NOGROUP:
	    *gforo_clauses_ptr = c;
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    break;
	  /* Taskloop clause we duplicate on both taskloops.  */
	  case OMP_CLAUSE_COLLAPSE:
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gforo_clauses_ptr = copy_node (c);
	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	    break;
	  /* For lastprivate, keep the clause on inner taskloop, and add
	     a shared clause on task.  If the same decl is also firstprivate,
	     add also firstprivate clause on the inner taskloop.  */
	  case OMP_CLAUSE_LASTPRIVATE:
	    if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	      {
		/* For taskloop C++ lastprivate IVs, we want:
		   1) private on outer taskloop
		   2) firstprivate and shared on task
		   3) lastprivate on inner taskloop  */
		*gtask_clauses_ptr
		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
				      OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						       OMP_CLAUSE_PRIVATE);
		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
	      }
	    *gfor_clauses_ptr = c;
	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
	    *gtask_clauses_ptr
	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
	    gtask_clauses_ptr
	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
				 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
				gimple_omp_for_collapse (gfor),
				gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
	{
	  t = unshare_expr (gimple_omp_for_index (gfor, i));
	  gimple_omp_for_set_index (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
	  gimple_omp_for_set_initial (gforo, i, t);
	  gimple_omp_for_set_cond (gforo, i,
				   gimple_omp_for_cond (gfor, i));
	  t = unshare_expr (gimple_omp_for_final (gfor, i));
	  gimple_omp_for_set_final (gforo, i, t);
	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
	  gimple_omp_for_set_incr (gforo, i, t);
	}
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);
  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
9414 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
9415 of OMP_TARGET's body. */
9417 static tree
9418 find_omp_teams (tree *tp, int *walk_subtrees, void *)
9420 *walk_subtrees = 0;
9421 switch (TREE_CODE (*tp))
9423 case OMP_TEAMS:
9424 return *tp;
9425 case BIND_EXPR:
9426 case STATEMENT_LIST:
9427 *walk_subtrees = 1;
9428 break;
9429 default:
9430 break;
9432 return NULL_TREE;
9435 /* Helper function of optimize_target_teams, determine if the expression
9436 can be computed safely before the target construct on the host. */
9438 static tree
9439 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
/* walk_tree callback: return the first subtree that prevents evaluating
   the clause expression on the host, or NULL_TREE if *TP is safe.
   NOTE(review): reads the global gimplify_omp_ctxp; the caller
   (optimize_target_teams) appears to ensure it is the target context.  */
9441 splay_tree_node n;
/* Types themselves never block host evaluation; don't walk into them.  */
9443 if (TYPE_P (*tp))
9445 *walk_subtrees = 0;
9446 return NULL_TREE;
9448 switch (TREE_CODE (*tp))
9450 case VAR_DECL:
9451 case PARM_DECL:
9452 case RESULT_DECL:
9453 *walk_subtrees = 0;
/* Only plain integral decls without value-exprs, TLS, side effects or
   volatility can be read safely on the host before the region.  */
9454 if (error_operand_p (*tp)
9455 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9456 || DECL_HAS_VALUE_EXPR_P (*tp)
9457 || DECL_THREAD_LOCAL_P (*tp)
9458 || TREE_SIDE_EFFECTS (*tp)
9459 || TREE_THIS_VOLATILE (*tp))
9460 return *tp;
/* Globals marked for the device may have a different value there;
   reject them.  */
9461 if (is_global_var (*tp)
9462 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9463 || lookup_attribute ("omp declare target link",
9464 DECL_ATTRIBUTES (*tp))))
9465 return *tp;
/* Consult the gimplify context for how the decl is handled on the
   target region.  */
9466 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9467 (splay_tree_key) *tp);
9468 if (n == NULL)
/* Not seen yet: OK only if scalars default to firstprivate on target.  */
9470 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9471 return NULL_TREE;
9472 return *tp;
/* Local to the region — has no host value.  */
9474 else if (n->value & GOVD_LOCAL)
9475 return *tp;
/* Firstprivate or map(always,to:)/map(always,tofrom:) guarantee the
   host value is the one the region sees.  */
9476 else if (n->value & GOVD_FIRSTPRIVATE)
9477 return NULL_TREE;
9478 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9479 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9480 return NULL_TREE;
9481 return *tp;
9482 case INTEGER_CST:
9483 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9484 return *tp;
9485 return NULL_TREE;
9486 case TARGET_EXPR:
/* A bare TARGET_EXPR slot (no initializer) is treated like its
   underlying VAR_DECL; recurse on the slot.  */
9487 if (TARGET_EXPR_INITIAL (*tp)
9488 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9489 return *tp;
9490 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9491 walk_subtrees, NULL)_
9492 /* Allow some reasonable subset of integral arithmetics. */
9493 case PLUS_EXPR:
9494 case MINUS_EXPR:
9495 case MULT_EXPR:
9496 case TRUNC_DIV_EXPR:
9497 case CEIL_DIV_EXPR:
9498 case FLOOR_DIV_EXPR:
9499 case ROUND_DIV_EXPR:
9500 case TRUNC_MOD_EXPR:
9501 case CEIL_MOD_EXPR:
9502 case FLOOR_MOD_EXPR:
9503 case ROUND_MOD_EXPR:
9504 case RDIV_EXPR:
9505 case EXACT_DIV_EXPR:
9506 case MIN_EXPR:
9507 case MAX_EXPR:
9508 case LSHIFT_EXPR:
9509 case RSHIFT_EXPR:
9510 case BIT_IOR_EXPR:
9511 case BIT_XOR_EXPR:
9512 case BIT_AND_EXPR:
9513 case NEGATE_EXPR:
9514 case ABS_EXPR:
9515 case BIT_NOT_EXPR:
9516 case NON_LVALUE_EXPR:
9517 CASE_CONVERT:
/* Operands are checked by continuing the walk; only require an
   integral result type here.  */
9518 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9519 return *tp;
9520 return NULL_TREE;
9521 /* And disallow anything else, except for comparisons. */
9522 default:
9523 if (COMPARISON_CLASS_P (*tp))
9524 return NULL_TREE;
9525 return *tp;
9529 /* Try to determine if the num_teams and/or thread_limit expressions
9530 can have their values determined already before entering the
9531 target construct.
9532 INTEGER_CSTs trivially are,
9533 integral decls that are firstprivate (explicitly or implicitly)
9534 or explicitly map(always, to:) or map(always, tofrom:) on the target
9535 region too, and expressions involving simple arithmetics on those
9536 too, function calls are not ok, dereferencing something neither etc.
9537 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9538 EXPR based on what we find:
9539 0 stands for clause not specified at all, use implementation default
9540 -1 stands for value that can't be determined easily before entering
9541 the target construct.
9542 If teams construct is not present at all, use 1 for num_teams
9543 and 0 for thread_limit (only one team is involved, and the thread
9544 limit is implementation defined. */
9546 static void
9547 optimize_target_teams (tree target, gimple_seq *pre_p)
9549 tree body = OMP_BODY (target);
/* Locate a teams construct nested (possibly via binds/statement
   lists) inside the target body.  */
9550 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
/* Defaults: 0 == "clause not specified, use implementation default".  */
9551 tree num_teams = integer_zero_node;
9552 tree thread_limit = integer_zero_node;
9553 location_t num_teams_loc = EXPR_LOCATION (target);
9554 location_t thread_limit_loc = EXPR_LOCATION (target);
9555 tree c, *p, expr;
/* Remember the target's gimplify context so it can be restored after
   gimplifying clause operands in the outer (host) context.  */
9556 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
/* No teams construct: exactly one team runs.  */
9558 if (teams == NULL_TREE)
9559 num_teams = integer_one_node;
9560 else
9561 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
/* P points at whichever of num_teams/thread_limit this clause sets.  */
9563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9565 p = &num_teams;
9566 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9568 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9570 p = &thread_limit;
9571 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9573 else
9574 continue;
9575 expr = OMP_CLAUSE_OPERAND (c, 0);
/* Constants can be used directly.  */
9576 if (TREE_CODE (expr) == INTEGER_CST)
9578 *p = expr;
9579 continue;
/* Anything not host-computable is marked -1 ("unknown before entry").  */
9581 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9583 *p = integer_minus_one_node;
9584 continue;
9586 *p = expr;
/* Gimplify the expression in the enclosing (host) context, since it
   will be evaluated before entering the target region.  */
9587 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9588 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
9589 == GS_ERROR)
9591 gimplify_omp_ctxp = target_ctx;
9592 *p = integer_minus_one_node;
9593 continue;
9595 gimplify_omp_ctxp = target_ctx;
/* Propagate the gimplified value back into the teams clause, unless
   it is a decl/TARGET_EXPR that must keep its original operand.  */
9596 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9597 OMP_CLAUSE_OPERAND (c, 0) = *p;
/* Prepend the computed THREAD_LIMIT and NUM_TEAMS clauses to the
   target construct's clause list.  */
9599 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9600 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9601 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9602 OMP_TARGET_CLAUSES (target) = c;
9603 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9604 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9605 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9606 OMP_TARGET_CLAUSES (target) = c;
9609 /* Gimplify the gross structure of several OMP constructs. */
9611 static void
9612 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
/* Handles OMP_SECTIONS/SINGLE/TARGET/TARGET_DATA/TEAMS and the OpenACC
   kernels/parallel/data/host_data constructs: scans clauses, gimplifies
   the body, adjusts clauses and emits the corresponding GIMPLE stmt.  */
9614 tree expr = *expr_p;
9615 gimple *stmt;
9616 gimple_seq body = NULL;
9617 enum omp_region_type ort;
/* Map the tree code to the omp region type used for clause scanning.  */
9619 switch (TREE_CODE (expr))
9621 case OMP_SECTIONS:
9622 case OMP_SINGLE:
9623 ort = ORT_WORKSHARE;
9624 break;
9625 case OMP_TARGET:
9626 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9627 break;
9628 case OACC_KERNELS:
9629 ort = ORT_ACC_KERNELS;
9630 break;
9631 case OACC_PARALLEL:
9632 ort = ORT_ACC_PARALLEL;
9633 break;
9634 case OACC_DATA:
9635 ort = ORT_ACC_DATA;
9636 break;
9637 case OMP_TARGET_DATA:
9638 ort = ORT_TARGET_DATA;
9639 break;
9640 case OMP_TEAMS:
9641 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
9642 break;
9643 case OACC_HOST_DATA:
9644 ort = ORT_ACC_HOST_DATA;
9645 break;
9646 default:
9647 gcc_unreachable ();
9649 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9650 TREE_CODE (expr));
/* For target regions, try to precompute num_teams/thread_limit.  */
9651 if (TREE_CODE (expr) == OMP_TARGET)
9652 optimize_target_teams (expr, pre_p);
/* Target and target-data bodies get their own gimplify context so the
   body forms a separate bind.  */
9653 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
9655 push_gimplify_context ();
9656 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
9657 if (gimple_code (g) == GIMPLE_BIND)
9658 pop_gimplify_context (g);
9659 else
9660 pop_gimplify_context (NULL);
/* Data regions need an "end data" call on every exit path, so wrap
   the body in a GIMPLE_TRY_FINALLY with the runtime call as cleanup.  */
9661 if ((ort & ORT_TARGET_DATA) != 0)
9663 enum built_in_function end_ix;
9664 switch (TREE_CODE (expr))
9666 case OACC_DATA:
9667 case OACC_HOST_DATA:
9668 end_ix = BUILT_IN_GOACC_DATA_END;
9669 break;
9670 case OMP_TARGET_DATA:
9671 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9672 break;
9673 default:
9674 gcc_unreachable ();
9676 tree fn = builtin_decl_explicit (end_ix);
9677 g = gimple_build_call (fn, 0);
9678 gimple_seq cleanup = NULL;
9679 gimple_seq_add_stmt (&cleanup, g);
9680 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9681 body = NULL;
9682 gimple_seq_add_stmt (&body, g);
9685 else
9686 gimplify_and_add (OMP_BODY (expr), &body);
9687 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
9688 TREE_CODE (expr));
/* Build the final GIMPLE statement for the construct.  */
9690 switch (TREE_CODE (expr))
9692 case OACC_DATA:
9693 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9694 OMP_CLAUSES (expr));
9695 break;
9696 case OACC_KERNELS:
9697 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9698 OMP_CLAUSES (expr));
9699 break;
9700 case OACC_HOST_DATA:
9701 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
9702 OMP_CLAUSES (expr));
9703 break;
9704 case OACC_PARALLEL:
9705 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9706 OMP_CLAUSES (expr));
9707 break;
9708 case OMP_SECTIONS:
9709 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9710 break;
9711 case OMP_SINGLE:
9712 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9713 break;
9714 case OMP_TARGET:
9715 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9716 OMP_CLAUSES (expr));
9717 break;
9718 case OMP_TARGET_DATA:
9719 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9720 OMP_CLAUSES (expr));
9721 break;
9722 case OMP_TEAMS:
9723 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9724 break;
9725 default:
9726 gcc_unreachable ();
9729 gimplify_seq_add_stmt (pre_p, stmt);
/* The construct is fully consumed; nothing remains in the tree.  */
9730 *expr_p = NULL_TREE;
9733 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9734 target update constructs. */
9736 static void
9737 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
/* These are standalone (bodyless) directives: scan and adjust their
   clauses, then emit a GIMPLE_OMP_TARGET with the matching kind.  */
9739 tree expr = *expr_p;
9740 int kind;
9741 gomp_target *stmt;
9742 enum omp_region_type ort = ORT_WORKSHARE;
/* Pick the GF_OMP_TARGET_KIND_* subcode; OpenACC variants also switch
   the region type to ORT_ACC for clause scanning.  */
9744 switch (TREE_CODE (expr))
9746 case OACC_ENTER_DATA:
9747 case OACC_EXIT_DATA:
9748 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9749 ort = ORT_ACC;
9750 break;
9751 case OACC_UPDATE:
9752 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9753 ort = ORT_ACC;
9754 break;
9755 case OMP_TARGET_UPDATE:
9756 kind = GF_OMP_TARGET_KIND_UPDATE;
9757 break;
9758 case OMP_TARGET_ENTER_DATA:
9759 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9760 break;
9761 case OMP_TARGET_EXIT_DATA:
9762 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9763 break;
9764 default:
9765 gcc_unreachable ();
9767 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9768 ort, TREE_CODE (expr));
/* NULL body: nothing was gimplified into a region for these.  */
9769 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
9770 TREE_CODE (expr));
9771 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9773 gimplify_seq_add_stmt (pre_p, stmt);
/* Directive fully consumed.  */
9774 *expr_p = NULL_TREE;
9777 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9778 stabilized the lhs of the atomic operation as *ADDR. Return true if
9779 EXPR is this stabilized form. */
9781 static bool
9782 goa_lhs_expr_p (tree expr, tree addr)
9784 /* Also include casts to other type variants. The C front end is fond
9785 of adding these for e.g. volatile variables. This is like
9786 STRIP_TYPE_NOPS but includes the main variant lookup. */
9787 STRIP_USELESS_TYPE_CONVERSION (expr);
/* Case 1: EXPR is *p — compare the pointer operand against ADDR,
   peeling matching conversions/NON_LVALUE wrappers in lockstep.  */
9789 if (TREE_CODE (expr) == INDIRECT_REF)
9791 expr = TREE_OPERAND (expr, 0);
9792 while (expr != addr
9793 && (CONVERT_EXPR_P (expr)
9794 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9795 && TREE_CODE (expr) == TREE_CODE (addr)
9796 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9798 expr = TREE_OPERAND (expr, 0);
9799 addr = TREE_OPERAND (addr, 0);
9801 if (expr == addr)
9802 return true;
/* Also accept &x == &x with distinct ADDR_EXPR nodes over the same
   decl.  */
9803 return (TREE_CODE (addr) == ADDR_EXPR
9804 && TREE_CODE (expr) == ADDR_EXPR
9805 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
/* Case 2: ADDR is &x and EXPR is x itself.  */
9807 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9808 return true;
9809 return false;
9812 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9813 expression does not involve the lhs, evaluate it into a temporary.
9814 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9815 or -1 if an error was encountered. */
9817 static int
9818 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9819 tree lhs_var)
9821 tree expr = *expr_p;
9822 int saw_lhs;
/* The whole expression is the lhs itself: substitute LHS_VAR.  */
9824 if (goa_lhs_expr_p (expr, lhs_addr))
9826 *expr_p = lhs_var;
9827 return 1;
/* Already a gimple value; nothing to stabilize.  */
9829 if (is_gimple_val (expr))
9830 return 0;
9832 saw_lhs = 0;
9833 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
9835 case tcc_binary:
9836 case tcc_comparison:
9837 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
9838 lhs_var);
/* FALLTHRU: binary/comparison then share the operand-0 recursion
   with unary expressions.  */
9839 case tcc_unary:
9840 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
9841 lhs_var);
9842 break;
9843 case tcc_expression:
9844 switch (TREE_CODE (expr))
9846 case TRUTH_ANDIF_EXPR:
9847 case TRUTH_ORIF_EXPR:
9848 case TRUTH_AND_EXPR:
9849 case TRUTH_OR_EXPR:
9850 case TRUTH_XOR_EXPR:
9851 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
9852 lhs_addr, lhs_var);
/* FALLTHRU: binary truth ops then handle operand 0 like TRUTH_NOT.  */
9853 case TRUTH_NOT_EXPR:
9854 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
9855 lhs_addr, lhs_var);
9856 break;
9857 case COMPOUND_EXPR:
9858 /* Break out any preevaluations from cp_build_modify_expr. */
9859 for (; TREE_CODE (expr) == COMPOUND_EXPR;
9860 expr = TREE_OPERAND (expr, 1))
9861 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
9862 *expr_p = expr;
9863 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
9864 default:
9865 break;
9867 break;
9868 default:
9869 break;
/* The lhs did not occur anywhere in EXPR: evaluate the whole thing
   into a temporary before the atomic region.  */
9872 if (saw_lhs == 0)
9874 enum gimplify_status gs;
9875 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
9876 if (gs != GS_ALL_DONE)
9877 saw_lhs = -1;
9880 return saw_lhs;
9883 /* Gimplify an OMP_ATOMIC statement. */
9885 static enum gimplify_status
9886 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
/* Lowers OMP_ATOMIC{,_READ,_CAPTURE_OLD,_CAPTURE_NEW} into a
   GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair.  */
9888 tree addr = TREE_OPERAND (*expr_p, 0);
/* OMP_ATOMIC_READ has no rhs; the others carry the stored value.  */
9889 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
9890 ? NULL : TREE_OPERAND (*expr_p, 1);
9891 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
9892 tree tmp_load;
9893 gomp_atomic_load *loadstmt;
9894 gomp_atomic_store *storestmt;
/* TMP_LOAD receives the atomically loaded old value; occurrences of
   the lhs inside RHS are rewritten to it by goa_stabilize_expr.  */
9896 tmp_load = create_tmp_reg (type);
9897 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
9898 return GS_ERROR;
9900 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
9901 != GS_ALL_DONE)
9902 return GS_ERROR;
9904 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
9905 gimplify_seq_add_stmt (pre_p, loadstmt);
9906 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
9907 != GS_ALL_DONE)
9908 return GS_ERROR;
/* For an atomic read the "stored" value is the loaded temporary.  */
9910 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
9911 rhs = tmp_load;
9912 storestmt = gimple_build_omp_atomic_store (rhs);
9913 gimplify_seq_add_stmt (pre_p, storestmt);
/* Propagate seq_cst from the clause onto both halves.  */
9914 if (OMP_ATOMIC_SEQ_CST (*expr_p))
9916 gimple_omp_atomic_set_seq_cst (loadstmt);
9917 gimple_omp_atomic_set_seq_cst (storestmt);
/* Capture forms produce a value: the old value (from the load) or the
   new value (from the store).  */
9919 switch (TREE_CODE (*expr_p))
9921 case OMP_ATOMIC_READ:
9922 case OMP_ATOMIC_CAPTURE_OLD:
9923 *expr_p = tmp_load;
9924 gimple_omp_atomic_set_need_value (loadstmt);
9925 break;
9926 case OMP_ATOMIC_CAPTURE_NEW:
9927 *expr_p = rhs;
9928 gimple_omp_atomic_set_need_value (storestmt);
9929 break;
9930 default:
9931 *expr_p = NULL;
9932 break;
9935 return GS_ALL_DONE;
9938 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
9939 body, and adding some EH bits. */
9941 static enum gimplify_status
9942 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
9944 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
9945 gimple *body_stmt;
9946 gtransaction *trans_stmt;
9947 gimple_seq body = NULL;
9948 int subcode = 0;
9950 /* Wrap the transaction body in a BIND_EXPR so we have a context
9951 where to put decls for OMP. */
9952 if (TREE_CODE (tbody) != BIND_EXPR)
9954 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
9955 TREE_SIDE_EFFECTS (bind) = 1;
9956 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
9957 TRANSACTION_EXPR_BODY (expr) = bind;
/* Gimplify the body in its own context; voidify_wrapper_expr returns a
   temporary if the transaction expression produces a value.  */
9960 push_gimplify_context ();
9961 temp = voidify_wrapper_expr (*expr_p, NULL);
9963 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
9964 pop_gimplify_context (body_stmt);
9966 trans_stmt = gimple_build_transaction (body);
/* Encode outer/relaxed attributes in the GIMPLE subcode.  */
9967 if (TRANSACTION_EXPR_OUTER (expr))
9968 subcode = GTMA_IS_OUTER;
9969 else if (TRANSACTION_EXPR_RELAXED (expr))
9970 subcode = GTMA_IS_RELAXED;
9971 gimple_transaction_set_subcode (trans_stmt, subcode);
9973 gimplify_seq_add_stmt (pre_p, trans_stmt);
/* If the transaction yielded a value, hand the temporary back for
   further gimplification; otherwise we're done.  */
9975 if (temp)
9977 *expr_p = temp;
9978 return GS_OK;
9981 *expr_p = NULL_TREE;
9982 return GS_ALL_DONE;
9985 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
9986 is the OMP_BODY of the original EXPR (which has already been
9987 gimplified so it's not present in the EXPR).
9989 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
9991 static gimple *
9992 gimplify_omp_ordered (tree expr, gimple_seq body)
/* Diagnoses invalid depend(sink:)/depend(source) clauses against the
   iteration variables recorded in gimplify_omp_ctxp->loop_iter_var;
   on any failure a GIMPLE_NOP is emitted instead of the construct.  */
9994 tree c, decls;
9995 int failures = 0;
9996 unsigned int i;
9997 tree source_c = NULL_TREE;
9998 tree sink_c = NULL_TREE;
10000 if (gimplify_omp_ctxp)
10002 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
/* depend clauses require an enclosing loop with ordered(n); an empty
   loop_iter_var vector means there is none.  */
10003 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10004 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10005 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10006 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10008 error_at (OMP_CLAUSE_LOCATION (c),
10009 "%<ordered%> construct with %<depend%> clause must be "
10010 "closely nested inside a loop with %<ordered%> clause "
10011 "with a parameter");
10012 failures++;
10014 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10015 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10017 bool fail = false;
/* loop_iter_var stores pairs: [2*i] is the source-level iteration
   variable, [2*i+1] the replacement to substitute into the clause.  */
10018 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10019 decls && TREE_CODE (decls) == TREE_LIST;
10020 decls = TREE_CHAIN (decls), ++i)
10021 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10022 continue;
10023 else if (TREE_VALUE (decls)
10024 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10026 error_at (OMP_CLAUSE_LOCATION (c),
10027 "variable %qE is not an iteration "
10028 "of outermost loop %d, expected %qE",
10029 TREE_VALUE (decls), i + 1,
10030 gimplify_omp_ctxp->loop_iter_var[2 * i])_
10031 fail = true;
10032 failures++;
10034 else
10035 TREE_VALUE (decls)
10036 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
/* After the walk, I must equal the number of collapsed loops.  */
10037 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10039 error_at (OMP_CLAUSE_LOCATION (c),
10040 "number of variables in %<depend(sink)%> "
10041 "clause does not match number of "
10042 "iteration variables");
10043 failures++;
10045 sink_c = c;
10047 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10048 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
/* At most one depend(source) clause is allowed.  */
10050 if (source_c)
10052 error_at (OMP_CLAUSE_LOCATION (c),
10053 "more than one %<depend(source)%> clause on an "
10054 "%<ordered%> construct");
10055 failures++;
10057 else
10058 source_c = c;
/* source and sink clauses are mutually exclusive on one construct.  */
10061 if (source_c && sink_c)
10063 error_at (OMP_CLAUSE_LOCATION (source_c),
10064 "%<depend(source)%> clause specified together with "
10065 "%<depend(sink:)%> clauses on the same construct");
10066 failures++;
10069 if (failures)
10070 return gimple_build_nop ();
10071 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
10074 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10075 expression produces a value to be used as an operand inside a GIMPLE
10076 statement, the value will be stored back in *EXPR_P. This value will
10077 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10078 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10079 emitted in PRE_P and POST_P.
10081 Additionally, this process may overwrite parts of the input
10082 expression during gimplification. Ideally, it should be
10083 possible to do non-destructive gimplification.
10085 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
10086 the expression needs to evaluate to a value to be used as
10087 an operand in a GIMPLE statement, this value will be stored in
10088 *EXPR_P on exit. This happens when the caller specifies one
10089 of fb_lvalue or fb_rvalue fallback flags.
10091 PRE_P will contain the sequence of GIMPLE statements corresponding
10092 to the evaluation of EXPR and all the side-effects that must
10093 be executed before the main expression. On exit, the last
10094 statement of PRE_P is the core statement being gimplified. For
10095 instance, when gimplifying 'if (++a)' the last statement in
10096 PRE_P will be 'if (t.1)' where t.1 is the result of
10097 pre-incrementing 'a'.
10099 POST_P will contain the sequence of GIMPLE statements corresponding
10100 to the evaluation of all the side-effects that must be executed
10101 after the main expression. If this is NULL, the post
10102 side-effects are stored at the end of PRE_P.
10104 The reason why the output is split in two is to handle post
10105 side-effects explicitly. In some cases, an expression may have
10106 inner and outer post side-effects which need to be emitted in
10107 an order different from the one given by the recursive
10108 traversal. For instance, for the expression (*p--)++ the post
10109 side-effects of '--' must actually occur *after* the post
10110 side-effects of '++'. However, gimplification will first visit
10111 the inner expression, so if a separate POST sequence was not
10112 used, the resulting sequence would be:
10114 1 t.1 = *p
10115 2 p = p - 1
10116 3 t.2 = t.1 + 1
10117 4 *p = t.2
10119 However, the post-decrement operation in line #2 must not be
10120 evaluated until after the store to *p at line #4, so the
10121 correct sequence should be:
10123 1 t.1 = *p
10124 2 t.2 = t.1 + 1
10125 3 *p = t.2
10126 4 p = p - 1
10128 So, by specifying a separate post queue, it is possible
10129 to emit the post side-effects in the correct order.
10130 If POST_P is NULL, an internal queue will be used. Before
10131 returning to the caller, the sequence POST_P is appended to
10132 the main output sequence PRE_P.
10134 GIMPLE_TEST_F points to a function that takes a tree T and
10135 returns nonzero if T is in the GIMPLE form requested by the
10136 caller. The GIMPLE predicates are in gimple.c.
10138 FALLBACK tells the function what sort of a temporary we want if
10139 gimplification cannot produce an expression that complies with
10140 GIMPLE_TEST_F.
10142 fb_none means that no temporary should be generated
10143 fb_rvalue means that an rvalue is OK to generate
10144 fb_lvalue means that an lvalue is OK to generate
10145 fb_either means that either is OK, but an lvalue is preferable.
10146 fb_mayfail means that gimplification may fail (in which case
10147 GS_ERROR will be returned)
10149 The return value is either GS_ERROR or GS_ALL_DONE, since this
10150 function iterates until EXPR is completely gimplified or an error
10151 occurs. */
10153 enum gimplify_status
10154 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
10155 bool (*gimple_test_f) (tree), fallback_t fallback)
10157 tree tmp;
10158 gimple_seq internal_pre = NULL;
10159 gimple_seq internal_post = NULL;
10160 tree save_expr;
10161 bool is_statement;
10162 location_t saved_location;
10163 enum gimplify_status ret;
10164 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
10166 save_expr = *expr_p;
10167 if (save_expr == NULL_TREE)
10168 return GS_ALL_DONE;
10170 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
10171 is_statement = gimple_test_f == is_gimple_stmt;
10172 if (is_statement)
10173 gcc_assert (pre_p);
10175 /* Consistency checks. */
10176 if (gimple_test_f == is_gimple_reg)
10177 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10178 else if (gimple_test_f == is_gimple_val
10179 || gimple_test_f == is_gimple_call_addr
10180 || gimple_test_f == is_gimple_condexpr
10181 || gimple_test_f == is_gimple_mem_rhs
10182 || gimple_test_f == is_gimple_mem_rhs_or_call
10183 || gimple_test_f == is_gimple_reg_rhs
10184 || gimple_test_f == is_gimple_reg_rhs_or_call
10185 || gimple_test_f == is_gimple_asm_val
10186 || gimple_test_f == is_gimple_mem_ref_addr)
10187 gcc_assert (fallback & fb_rvalue);
10188 else if (gimple_test_f == is_gimple_min_lval
10189 || gimple_test_f == is_gimple_lvalue)
10190 gcc_assert (fallback & fb_lvalue);
10191 else if (gimple_test_f == is_gimple_addressable)
10192 gcc_assert (fallback & fb_either);
10193 else if (gimple_test_f == is_gimple_stmt)
10194 gcc_assert (fallback == fb_none);
10195 else
10197 /* We should have recognized the GIMPLE_TEST_F predicate to
10198 know what kind of fallback to use in case a temporary is
10199 needed to hold the value or address of *EXPR_P. */
10200 gcc_unreachable ();
10203 /* We used to check the predicate here and return immediately if it
10204 succeeds. This is wrong; the design is for gimplification to be
10205 idempotent, and for the predicates to only test for valid forms, not
10206 whether they are fully simplified. */
10207 if (pre_p == NULL)
10208 pre_p = &internal_pre;
10210 if (post_p == NULL)
10211 post_p = &internal_post;
10213 /* Remember the last statements added to PRE_P and POST_P. Every
10214 new statement added by the gimplification helpers needs to be
10215 annotated with location information. To centralize the
10216 responsibility, we remember the last statement that had been
10217 added to both queues before gimplifying *EXPR_P. If
10218 gimplification produces new statements in PRE_P and POST_P, those
10219 statements will be annotated with the same location information
10220 as *EXPR_P. */
10221 pre_last_gsi = gsi_last (*pre_p);
10222 post_last_gsi = gsi_last (*post_p);
10224 saved_location = input_location;
10225 if (save_expr != error_mark_node
10226 && EXPR_HAS_LOCATION (*expr_p))
10227 input_location = EXPR_LOCATION (*expr_p);
10229 /* Loop over the specific gimplifiers until the toplevel node
10230 remains the same. */
10233 /* Strip away as many useless type conversions as possible
10234 at the toplevel. */
10235 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
10237 /* Remember the expr. */
10238 save_expr = *expr_p;
10240 /* Die, die, die, my darling. */
10241 if (save_expr == error_mark_node
10242 || (TREE_TYPE (save_expr)
10243 && TREE_TYPE (save_expr) == error_mark_node))
10245 ret = GS_ERROR;
10246 break;
10249 /* Do any language-specific gimplification. */
10250 ret = ((enum gimplify_status)
10251 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
10252 if (ret == GS_OK)
10254 if (*expr_p == NULL_TREE)
10255 break;
10256 if (*expr_p != save_expr)
10257 continue;
10259 else if (ret != GS_UNHANDLED)
10260 break;
10262 /* Make sure that all the cases set 'ret' appropriately. */
10263 ret = GS_UNHANDLED;
10264 switch (TREE_CODE (*expr_p))
10266 /* First deal with the special cases. */
10268 case POSTINCREMENT_EXPR:
10269 case POSTDECREMENT_EXPR:
10270 case PREINCREMENT_EXPR:
10271 case PREDECREMENT_EXPR:
10272 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10273 fallback != fb_none,
10274 TREE_TYPE (*expr_p));
10275 break;
10277 case VIEW_CONVERT_EXPR:
10278 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10279 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10281 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10282 post_p, is_gimple_val, fb_rvalue);
10283 recalculate_side_effects (*expr_p);
10284 break;
10286 /* Fallthru. */
10288 case ARRAY_REF:
10289 case ARRAY_RANGE_REF:
10290 case REALPART_EXPR:
10291 case IMAGPART_EXPR:
10292 case COMPONENT_REF:
10293 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
10294 fallback ? fallback : fb_rvalue);
10295 break;
10297 case COND_EXPR:
10298 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
10300 /* C99 code may assign to an array in a structure value of a
10301 conditional expression, and this has undefined behavior
10302 only on execution, so create a temporary if an lvalue is
10303 required. */
10304 if (fallback == fb_lvalue)
10306 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
10307 mark_addressable (*expr_p);
10308 ret = GS_OK;
10310 break;
10312 case CALL_EXPR:
10313 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
10315 /* C99 code may assign to an array in a structure returned
10316 from a function, and this has undefined behavior only on
10317 execution, so create a temporary if an lvalue is
10318 required. */
10319 if (fallback == fb_lvalue)
10321 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
10322 mark_addressable (*expr_p);
10323 ret = GS_OK;
10325 break;
10327 case TREE_LIST:
10328 gcc_unreachable ();
10330 case COMPOUND_EXPR:
10331 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10332 break;
10334 case COMPOUND_LITERAL_EXPR:
10335 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10336 gimple_test_f, fallback);
10337 break;
10339 case MODIFY_EXPR:
10340 case INIT_EXPR:
10341 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10342 fallback != fb_none);
10343 break;
10345 case TRUTH_ANDIF_EXPR:
10346 case TRUTH_ORIF_EXPR:
10348 /* Preserve the original type of the expression and the
10349 source location of the outer expression. */
10350 tree org_type = TREE_TYPE (*expr_p);
10351 *expr_p = gimple_boolify (*expr_p);
10352 *expr_p = build3_loc (input_location, COND_EXPR,
10353 org_type, *expr_p,
10354 fold_convert_loc
10355 (input_location,
10356 org_type, boolean_true_node),
10357 fold_convert_loc
10358 (input_location,
10359 org_type, boolean_false_node));
10360 ret = GS_OK;
10361 break;
10364 case TRUTH_NOT_EXPR:
10366 tree type = TREE_TYPE (*expr_p);
10367 /* The parsers are careful to generate TRUTH_NOT_EXPR
10368 only with operands that are always zero or one.
10369 We do not fold here but handle the only interesting case
10370 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
10371 *expr_p = gimple_boolify (*expr_p);
10372 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10373 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10374 TREE_TYPE (*expr_p),
10375 TREE_OPERAND (*expr_p, 0));
10376 else
10377 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10378 TREE_TYPE (*expr_p),
10379 TREE_OPERAND (*expr_p, 0),
10380 build_int_cst (TREE_TYPE (*expr_p), 1));
10381 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10382 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10383 ret = GS_OK;
10384 break;
10387 case ADDR_EXPR:
10388 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10389 break;
10391 case ANNOTATE_EXPR:
10393 tree cond = TREE_OPERAND (*expr_p, 0);
10394 tree kind = TREE_OPERAND (*expr_p, 1);
10395 tree type = TREE_TYPE (cond);
10396 if (!INTEGRAL_TYPE_P (type))
10398 *expr_p = cond;
10399 ret = GS_OK;
10400 break;
10402 tree tmp = create_tmp_var (type);
10403 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
10404 gcall *call
10405 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
10406 gimple_call_set_lhs (call, tmp);
10407 gimplify_seq_add_stmt (pre_p, call);
10408 *expr_p = tmp;
10409 ret = GS_ALL_DONE;
10410 break;
10413 case VA_ARG_EXPR:
10414 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
10415 break;
10417 CASE_CONVERT:
10418 if (IS_EMPTY_STMT (*expr_p))
10420 ret = GS_ALL_DONE;
10421 break;
10424 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10425 || fallback == fb_none)
10427 /* Just strip a conversion to void (or in void context) and
10428 try again. */
10429 *expr_p = TREE_OPERAND (*expr_p, 0);
10430 ret = GS_OK;
10431 break;
10434 ret = gimplify_conversion (expr_p);
10435 if (ret == GS_ERROR)
10436 break;
10437 if (*expr_p != save_expr)
10438 break;
10439 /* FALLTHRU */
10441 case FIX_TRUNC_EXPR:
10442 /* unary_expr: ... | '(' cast ')' val | ... */
10443 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10444 is_gimple_val, fb_rvalue);
10445 recalculate_side_effects (*expr_p);
10446 break;
10448 case INDIRECT_REF:
10450 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
10451 bool notrap = TREE_THIS_NOTRAP (*expr_p);
10452 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10454 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10455 if (*expr_p != save_expr)
10457 ret = GS_OK;
10458 break;
10461 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10462 is_gimple_reg, fb_rvalue);
10463 if (ret == GS_ERROR)
10464 break;
10466 recalculate_side_effects (*expr_p);
10467 *expr_p = fold_build2_loc (input_location, MEM_REF,
10468 TREE_TYPE (*expr_p),
10469 TREE_OPERAND (*expr_p, 0),
10470 build_int_cst (saved_ptr_type, 0));
10471 TREE_THIS_VOLATILE (*expr_p) = volatilep;
10472 TREE_THIS_NOTRAP (*expr_p) = notrap;
10473 ret = GS_OK;
10474 break;
10477 /* We arrive here through the various re-gimplifcation paths. */
10478 case MEM_REF:
10479 /* First try re-folding the whole thing. */
10480 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10481 TREE_OPERAND (*expr_p, 0),
10482 TREE_OPERAND (*expr_p, 1));
10483 if (tmp)
10485 REF_REVERSE_STORAGE_ORDER (tmp)
10486 = REF_REVERSE_STORAGE_ORDER (*expr_p);
10487 *expr_p = tmp;
10488 recalculate_side_effects (*expr_p);
10489 ret = GS_OK;
10490 break;
10492 /* Avoid re-gimplifying the address operand if it is already
10493 in suitable form. Re-gimplifying would mark the address
10494 operand addressable. Always gimplify when not in SSA form
10495 as we still may have to gimplify decls with value-exprs. */
10496 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
10497 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10499 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10500 is_gimple_mem_ref_addr, fb_rvalue);
10501 if (ret == GS_ERROR)
10502 break;
10504 recalculate_side_effects (*expr_p);
10505 ret = GS_ALL_DONE;
10506 break;
10508 /* Constants need not be gimplified. */
10509 case INTEGER_CST:
10510 case REAL_CST:
10511 case FIXED_CST:
10512 case STRING_CST:
10513 case COMPLEX_CST:
10514 case VECTOR_CST:
10515 /* Drop the overflow flag on constants, we do not want
10516 that in the GIMPLE IL. */
10517 if (TREE_OVERFLOW_P (*expr_p))
10518 *expr_p = drop_tree_overflow (*expr_p);
10519 ret = GS_ALL_DONE;
10520 break;
10522 case CONST_DECL:
10523 /* If we require an lvalue, such as for ADDR_EXPR, retain the
10524 CONST_DECL node. Otherwise the decl is replaceable by its
10525 value. */
10526 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10527 if (fallback & fb_lvalue)
10528 ret = GS_ALL_DONE;
10529 else
10531 *expr_p = DECL_INITIAL (*expr_p);
10532 ret = GS_OK;
10534 break;
10536 case DECL_EXPR:
10537 ret = gimplify_decl_expr (expr_p, pre_p);
10538 break;
10540 case BIND_EXPR:
10541 ret = gimplify_bind_expr (expr_p, pre_p);
10542 break;
10544 case LOOP_EXPR:
10545 ret = gimplify_loop_expr (expr_p, pre_p);
10546 break;
10548 case SWITCH_EXPR:
10549 ret = gimplify_switch_expr (expr_p, pre_p);
10550 break;
10552 case EXIT_EXPR:
10553 ret = gimplify_exit_expr (expr_p);
10554 break;
10556 case GOTO_EXPR:
10557 /* If the target is not LABEL, then it is a computed jump
10558 and the target needs to be gimplified. */
10559 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10561 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10562 NULL, is_gimple_val, fb_rvalue);
10563 if (ret == GS_ERROR)
10564 break;
10566 gimplify_seq_add_stmt (pre_p,
10567 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10568 ret = GS_ALL_DONE;
10569 break;
10571 case PREDICT_EXPR:
10572 gimplify_seq_add_stmt (pre_p,
10573 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10574 PREDICT_EXPR_OUTCOME (*expr_p)));
10575 ret = GS_ALL_DONE;
10576 break;
10578 case LABEL_EXPR:
10579 ret = GS_ALL_DONE;
10580 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10581 == current_function_decl);
10582 gimplify_seq_add_stmt (pre_p,
10583 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10584 break;
10586 case CASE_LABEL_EXPR:
10587 ret = gimplify_case_label_expr (expr_p, pre_p);
10588 break;
10590 case RETURN_EXPR:
10591 ret = gimplify_return_expr (*expr_p, pre_p);
10592 break;
10594 case CONSTRUCTOR:
10595 /* Don't reduce this in place; let gimplify_init_constructor work its
10596 magic. Buf if we're just elaborating this for side effects, just
10597 gimplify any element that has side-effects. */
10598 if (fallback == fb_none)
10600 unsigned HOST_WIDE_INT ix;
10601 tree val;
10602 tree temp = NULL_TREE;
10603 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10604 if (TREE_SIDE_EFFECTS (val))
10605 append_to_statement_list (val, &temp);
10607 *expr_p = temp;
10608 ret = temp ? GS_OK : GS_ALL_DONE;
10610 /* C99 code may assign to an array in a constructed
10611 structure or union, and this has undefined behavior only
10612 on execution, so create a temporary if an lvalue is
10613 required. */
10614 else if (fallback == fb_lvalue)
10616 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
10617 mark_addressable (*expr_p);
10618 ret = GS_OK;
10620 else
10621 ret = GS_ALL_DONE;
10622 break;
10624 /* The following are special cases that are not handled by the
10625 original GIMPLE grammar. */
10627 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10628 eliminated. */
10629 case SAVE_EXPR:
10630 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10631 break;
10633 case BIT_FIELD_REF:
10634 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10635 post_p, is_gimple_lvalue, fb_either);
10636 recalculate_side_effects (*expr_p);
10637 break;
10639 case TARGET_MEM_REF:
10641 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10643 if (TMR_BASE (*expr_p))
10644 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10645 post_p, is_gimple_mem_ref_addr, fb_either);
10646 if (TMR_INDEX (*expr_p))
10647 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10648 post_p, is_gimple_val, fb_rvalue);
10649 if (TMR_INDEX2 (*expr_p))
10650 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10651 post_p, is_gimple_val, fb_rvalue);
10652 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10653 ret = MIN (r0, r1);
10655 break;
10657 case NON_LVALUE_EXPR:
10658 /* This should have been stripped above. */
10659 gcc_unreachable ();
10661 case ASM_EXPR:
10662 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10663 break;
10665 case TRY_FINALLY_EXPR:
10666 case TRY_CATCH_EXPR:
10668 gimple_seq eval, cleanup;
10669 gtry *try_;
10671 /* Calls to destructors are generated automatically in FINALLY/CATCH
10672 block. They should have location as UNKNOWN_LOCATION. However,
10673 gimplify_call_expr will reset these call stmts to input_location
10674 if it finds stmt's location is unknown. To prevent resetting for
10675 destructors, we set the input_location to unknown.
10676 Note that this only affects the destructor calls in FINALLY/CATCH
10677 block, and will automatically reset to its original value by the
10678 end of gimplify_expr. */
10679 input_location = UNKNOWN_LOCATION;
10680 eval = cleanup = NULL;
10681 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10682 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10683 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10684 if (gimple_seq_empty_p (cleanup))
10686 gimple_seq_add_seq (pre_p, eval);
10687 ret = GS_ALL_DONE;
10688 break;
10690 try_ = gimple_build_try (eval, cleanup,
10691 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10692 ? GIMPLE_TRY_FINALLY
10693 : GIMPLE_TRY_CATCH);
10694 if (EXPR_HAS_LOCATION (save_expr))
10695 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10696 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10697 gimple_set_location (try_, saved_location);
10698 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10699 gimple_try_set_catch_is_cleanup (try_,
10700 TRY_CATCH_IS_CLEANUP (*expr_p));
10701 gimplify_seq_add_stmt (pre_p, try_);
10702 ret = GS_ALL_DONE;
10703 break;
10706 case CLEANUP_POINT_EXPR:
10707 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10708 break;
10710 case TARGET_EXPR:
10711 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10712 break;
10714 case CATCH_EXPR:
10716 gimple *c;
10717 gimple_seq handler = NULL;
10718 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10719 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10720 gimplify_seq_add_stmt (pre_p, c);
10721 ret = GS_ALL_DONE;
10722 break;
10725 case EH_FILTER_EXPR:
10727 gimple *ehf;
10728 gimple_seq failure = NULL;
10730 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10731 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10732 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10733 gimplify_seq_add_stmt (pre_p, ehf);
10734 ret = GS_ALL_DONE;
10735 break;
10738 case OBJ_TYPE_REF:
10740 enum gimplify_status r0, r1;
10741 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10742 post_p, is_gimple_val, fb_rvalue);
10743 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10744 post_p, is_gimple_val, fb_rvalue);
10745 TREE_SIDE_EFFECTS (*expr_p) = 0;
10746 ret = MIN (r0, r1);
10748 break;
10750 case LABEL_DECL:
10751 /* We get here when taking the address of a label. We mark
10752 the label as "forced"; meaning it can never be removed and
10753 it is a potential target for any computed goto. */
10754 FORCED_LABEL (*expr_p) = 1;
10755 ret = GS_ALL_DONE;
10756 break;
10758 case STATEMENT_LIST:
10759 ret = gimplify_statement_list (expr_p, pre_p);
10760 break;
10762 case WITH_SIZE_EXPR:
10764 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10765 post_p == &internal_post ? NULL : post_p,
10766 gimple_test_f, fallback);
10767 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10768 is_gimple_val, fb_rvalue);
10769 ret = GS_ALL_DONE;
10771 break;
10773 case VAR_DECL:
10774 case PARM_DECL:
10775 ret = gimplify_var_or_parm_decl (expr_p);
10776 break;
10778 case RESULT_DECL:
10779 /* When within an OMP context, notice uses of variables. */
10780 if (gimplify_omp_ctxp)
10781 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10782 ret = GS_ALL_DONE;
10783 break;
10785 case SSA_NAME:
10786 /* Allow callbacks into the gimplifier during optimization. */
10787 ret = GS_ALL_DONE;
10788 break;
10790 case OMP_PARALLEL:
10791 gimplify_omp_parallel (expr_p, pre_p);
10792 ret = GS_ALL_DONE;
10793 break;
10795 case OMP_TASK:
10796 gimplify_omp_task (expr_p, pre_p);
10797 ret = GS_ALL_DONE;
10798 break;
10800 case OMP_FOR:
10801 case OMP_SIMD:
10802 case CILK_SIMD:
10803 case CILK_FOR:
10804 case OMP_DISTRIBUTE:
10805 case OMP_TASKLOOP:
10806 case OACC_LOOP:
10807 ret = gimplify_omp_for (expr_p, pre_p);
10808 break;
10810 case OACC_CACHE:
10811 gimplify_oacc_cache (expr_p, pre_p);
10812 ret = GS_ALL_DONE;
10813 break;
10815 case OACC_DECLARE:
10816 gimplify_oacc_declare (expr_p, pre_p);
10817 ret = GS_ALL_DONE;
10818 break;
10820 case OACC_HOST_DATA:
10821 case OACC_DATA:
10822 case OACC_KERNELS:
10823 case OACC_PARALLEL:
10824 case OMP_SECTIONS:
10825 case OMP_SINGLE:
10826 case OMP_TARGET:
10827 case OMP_TARGET_DATA:
10828 case OMP_TEAMS:
10829 gimplify_omp_workshare (expr_p, pre_p);
10830 ret = GS_ALL_DONE;
10831 break;
10833 case OACC_ENTER_DATA:
10834 case OACC_EXIT_DATA:
10835 case OACC_UPDATE:
10836 case OMP_TARGET_UPDATE:
10837 case OMP_TARGET_ENTER_DATA:
10838 case OMP_TARGET_EXIT_DATA:
10839 gimplify_omp_target_update (expr_p, pre_p);
10840 ret = GS_ALL_DONE;
10841 break;
10843 case OMP_SECTION:
10844 case OMP_MASTER:
10845 case OMP_TASKGROUP:
10846 case OMP_ORDERED:
10847 case OMP_CRITICAL:
10849 gimple_seq body = NULL;
10850 gimple *g;
10852 gimplify_and_add (OMP_BODY (*expr_p), &body);
10853 switch (TREE_CODE (*expr_p))
10855 case OMP_SECTION:
10856 g = gimple_build_omp_section (body);
10857 break;
10858 case OMP_MASTER:
10859 g = gimple_build_omp_master (body);
10860 break;
10861 case OMP_TASKGROUP:
10863 gimple_seq cleanup = NULL;
10864 tree fn
10865 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10866 g = gimple_build_call (fn, 0);
10867 gimple_seq_add_stmt (&cleanup, g);
10868 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10869 body = NULL;
10870 gimple_seq_add_stmt (&body, g);
10871 g = gimple_build_omp_taskgroup (body);
10873 break;
10874 case OMP_ORDERED:
10875 g = gimplify_omp_ordered (*expr_p, body);
10876 break;
10877 case OMP_CRITICAL:
10878 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10879 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10880 gimplify_adjust_omp_clauses (pre_p, body,
10881 &OMP_CRITICAL_CLAUSES (*expr_p),
10882 OMP_CRITICAL);
10883 g = gimple_build_omp_critical (body,
10884 OMP_CRITICAL_NAME (*expr_p),
10885 OMP_CRITICAL_CLAUSES (*expr_p));
10886 break;
10887 default:
10888 gcc_unreachable ();
10890 gimplify_seq_add_stmt (pre_p, g);
10891 ret = GS_ALL_DONE;
10892 break;
10895 case OMP_ATOMIC:
10896 case OMP_ATOMIC_READ:
10897 case OMP_ATOMIC_CAPTURE_OLD:
10898 case OMP_ATOMIC_CAPTURE_NEW:
10899 ret = gimplify_omp_atomic (expr_p, pre_p);
10900 break;
10902 case TRANSACTION_EXPR:
10903 ret = gimplify_transaction (expr_p, pre_p);
10904 break;
10906 case TRUTH_AND_EXPR:
10907 case TRUTH_OR_EXPR:
10908 case TRUTH_XOR_EXPR:
10910 tree orig_type = TREE_TYPE (*expr_p);
10911 tree new_type, xop0, xop1;
10912 *expr_p = gimple_boolify (*expr_p);
10913 new_type = TREE_TYPE (*expr_p);
10914 if (!useless_type_conversion_p (orig_type, new_type))
10916 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
10917 ret = GS_OK;
10918 break;
10921 /* Boolified binary truth expressions are semantically equivalent
10922 to bitwise binary expressions. Canonicalize them to the
10923 bitwise variant. */
10924 switch (TREE_CODE (*expr_p))
10926 case TRUTH_AND_EXPR:
10927 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10928 break;
10929 case TRUTH_OR_EXPR:
10930 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
10931 break;
10932 case TRUTH_XOR_EXPR:
10933 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
10934 break;
10935 default:
10936 break;
10938 /* Now make sure that operands have compatible type to
10939 expression's new_type. */
10940 xop0 = TREE_OPERAND (*expr_p, 0);
10941 xop1 = TREE_OPERAND (*expr_p, 1);
10942 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
10943 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
10944 new_type,
10945 xop0);
10946 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
10947 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
10948 new_type,
10949 xop1);
10950 /* Continue classified as tcc_binary. */
10951 goto expr_2;
10954 case VEC_COND_EXPR:
10956 enum gimplify_status r0, r1, r2;
10958 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10959 post_p, is_gimple_condexpr, fb_rvalue);
10960 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10961 post_p, is_gimple_val, fb_rvalue);
10962 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
10963 post_p, is_gimple_val, fb_rvalue);
10965 ret = MIN (MIN (r0, r1), r2);
10966 recalculate_side_effects (*expr_p);
10968 break;
10970 case FMA_EXPR:
10971 case VEC_PERM_EXPR:
10972 /* Classified as tcc_expression. */
10973 goto expr_3;
10975 case BIT_INSERT_EXPR:
10976 /* Argument 3 is a constant. */
10977 goto expr_2;
10979 case POINTER_PLUS_EXPR:
10981 enum gimplify_status r0, r1;
10982 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10983 post_p, is_gimple_val, fb_rvalue);
10984 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10985 post_p, is_gimple_val, fb_rvalue);
10986 recalculate_side_effects (*expr_p);
10987 ret = MIN (r0, r1);
10988 break;
10991 case CILK_SYNC_STMT:
10993 if (!fn_contains_cilk_spawn_p (cfun))
10995 error_at (EXPR_LOCATION (*expr_p),
10996 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
10997 ret = GS_ERROR;
10999 else
11001 gimplify_cilk_sync (expr_p, pre_p);
11002 ret = GS_ALL_DONE;
11004 break;
11007 default:
11008 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11010 case tcc_comparison:
11011 /* Handle comparison of objects of non scalar mode aggregates
11012 with a call to memcmp. It would be nice to only have to do
11013 this for variable-sized objects, but then we'd have to allow
11014 the same nest of reference nodes we allow for MODIFY_EXPR and
11015 that's too complex.
11017 Compare scalar mode aggregates as scalar mode values. Using
11018 memcmp for them would be very inefficient at best, and is
11019 plain wrong if bitfields are involved. */
11021 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
11023 /* Vector comparisons need no boolification. */
11024 if (TREE_CODE (type) == VECTOR_TYPE)
11025 goto expr_2;
11026 else if (!AGGREGATE_TYPE_P (type))
11028 tree org_type = TREE_TYPE (*expr_p);
11029 *expr_p = gimple_boolify (*expr_p);
11030 if (!useless_type_conversion_p (org_type,
11031 TREE_TYPE (*expr_p)))
11033 *expr_p = fold_convert_loc (input_location,
11034 org_type, *expr_p);
11035 ret = GS_OK;
11037 else
11038 goto expr_2;
11040 else if (TYPE_MODE (type) != BLKmode)
11041 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11042 else
11043 ret = gimplify_variable_sized_compare (expr_p);
11045 break;
11048 /* If *EXPR_P does not need to be special-cased, handle it
11049 according to its class. */
11050 case tcc_unary:
11051 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11052 post_p, is_gimple_val, fb_rvalue);
11053 break;
11055 case tcc_binary:
11056 expr_2:
11058 enum gimplify_status r0, r1;
11060 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11061 post_p, is_gimple_val, fb_rvalue);
11062 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11063 post_p, is_gimple_val, fb_rvalue);
11065 ret = MIN (r0, r1);
11066 break;
11069 expr_3:
11071 enum gimplify_status r0, r1, r2;
11073 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11074 post_p, is_gimple_val, fb_rvalue);
11075 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11076 post_p, is_gimple_val, fb_rvalue);
11077 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11078 post_p, is_gimple_val, fb_rvalue);
11080 ret = MIN (MIN (r0, r1), r2);
11081 break;
11084 case tcc_declaration:
11085 case tcc_constant:
11086 ret = GS_ALL_DONE;
11087 goto dont_recalculate;
11089 default:
11090 gcc_unreachable ();
11093 recalculate_side_effects (*expr_p);
11095 dont_recalculate:
11096 break;
11099 gcc_assert (*expr_p || ret != GS_OK);
11101 while (ret == GS_OK);
11103 /* If we encountered an error_mark somewhere nested inside, either
11104 stub out the statement or propagate the error back out. */
11105 if (ret == GS_ERROR)
11107 if (is_statement)
11108 *expr_p = NULL;
11109 goto out;
11112 /* This was only valid as a return value from the langhook, which
11113 we handled. Make sure it doesn't escape from any other context. */
11114 gcc_assert (ret != GS_UNHANDLED);
11116 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
11118 /* We aren't looking for a value, and we don't have a valid
11119 statement. If it doesn't have side-effects, throw it away. */
11120 if (!TREE_SIDE_EFFECTS (*expr_p))
11121 *expr_p = NULL;
11122 else if (!TREE_THIS_VOLATILE (*expr_p))
11124 /* This is probably a _REF that contains something nested that
11125 has side effects. Recurse through the operands to find it. */
11126 enum tree_code code = TREE_CODE (*expr_p);
11128 switch (code)
11130 case COMPONENT_REF:
11131 case REALPART_EXPR:
11132 case IMAGPART_EXPR:
11133 case VIEW_CONVERT_EXPR:
11134 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11135 gimple_test_f, fallback);
11136 break;
11138 case ARRAY_REF:
11139 case ARRAY_RANGE_REF:
11140 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11141 gimple_test_f, fallback);
11142 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11143 gimple_test_f, fallback);
11144 break;
11146 default:
11147 /* Anything else with side-effects must be converted to
11148 a valid statement before we get here. */
11149 gcc_unreachable ();
11152 *expr_p = NULL;
11154 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
11155 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
11157 /* Historically, the compiler has treated a bare reference
11158 to a non-BLKmode volatile lvalue as forcing a load. */
11159 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
11161 /* Normally, we do not want to create a temporary for a
11162 TREE_ADDRESSABLE type because such a type should not be
11163 copied by bitwise-assignment. However, we make an
11164 exception here, as all we are doing here is ensuring that
11165 we read the bytes that make up the type. We use
11166 create_tmp_var_raw because create_tmp_var will abort when
11167 given a TREE_ADDRESSABLE type. */
11168 tree tmp = create_tmp_var_raw (type, "vol");
11169 gimple_add_tmp_var (tmp);
11170 gimplify_assign (tmp, *expr_p, pre_p);
11171 *expr_p = NULL;
11173 else
11174 /* We can't do anything useful with a volatile reference to
11175 an incomplete type, so just throw it away. Likewise for
11176 a BLKmode type, since any implicit inner load should
11177 already have been turned into an explicit one by the
11178 gimplification process. */
11179 *expr_p = NULL;
11182 /* If we are gimplifying at the statement level, we're done. Tack
11183 everything together and return. */
11184 if (fallback == fb_none || is_statement)
11186 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
11187 it out for GC to reclaim it. */
11188 *expr_p = NULL_TREE;
11190 if (!gimple_seq_empty_p (internal_pre)
11191 || !gimple_seq_empty_p (internal_post))
11193 gimplify_seq_add_seq (&internal_pre, internal_post);
11194 gimplify_seq_add_seq (pre_p, internal_pre);
11197 /* The result of gimplifying *EXPR_P is going to be the last few
11198 statements in *PRE_P and *POST_P. Add location information
11199 to all the statements that were added by the gimplification
11200 helpers. */
11201 if (!gimple_seq_empty_p (*pre_p))
11202 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
11204 if (!gimple_seq_empty_p (*post_p))
11205 annotate_all_with_location_after (*post_p, post_last_gsi,
11206 input_location);
11208 goto out;
11211 #ifdef ENABLE_GIMPLE_CHECKING
11212 if (*expr_p)
11214 enum tree_code code = TREE_CODE (*expr_p);
11215 /* These expressions should already be in gimple IR form. */
11216 gcc_assert (code != MODIFY_EXPR
11217 && code != ASM_EXPR
11218 && code != BIND_EXPR
11219 && code != CATCH_EXPR
11220 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
11221 && code != EH_FILTER_EXPR
11222 && code != GOTO_EXPR
11223 && code != LABEL_EXPR
11224 && code != LOOP_EXPR
11225 && code != SWITCH_EXPR
11226 && code != TRY_FINALLY_EXPR
11227 && code != OACC_PARALLEL
11228 && code != OACC_KERNELS
11229 && code != OACC_DATA
11230 && code != OACC_HOST_DATA
11231 && code != OACC_DECLARE
11232 && code != OACC_UPDATE
11233 && code != OACC_ENTER_DATA
11234 && code != OACC_EXIT_DATA
11235 && code != OACC_CACHE
11236 && code != OMP_CRITICAL
11237 && code != OMP_FOR
11238 && code != OACC_LOOP
11239 && code != OMP_MASTER
11240 && code != OMP_TASKGROUP
11241 && code != OMP_ORDERED
11242 && code != OMP_PARALLEL
11243 && code != OMP_SECTIONS
11244 && code != OMP_SECTION
11245 && code != OMP_SINGLE);
11247 #endif
11249 /* Otherwise we're gimplifying a subexpression, so the resulting
11250 value is interesting. If it's a valid operand that matches
11251 GIMPLE_TEST_F, we're done. Unless we are handling some
11252 post-effects internally; if that's the case, we need to copy into
11253 a temporary before adding the post-effects to POST_P. */
11254 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
11255 goto out;
11257 /* Otherwise, we need to create a new temporary for the gimplified
11258 expression. */
11260 /* We can't return an lvalue if we have an internal postqueue. The
11261 object the lvalue refers to would (probably) be modified by the
11262 postqueue; we need to copy the value out first, which means an
11263 rvalue. */
11264 if ((fallback & fb_lvalue)
11265 && gimple_seq_empty_p (internal_post)
11266 && is_gimple_addressable (*expr_p))
11268 /* An lvalue will do. Take the address of the expression, store it
11269 in a temporary, and replace the expression with an INDIRECT_REF of
11270 that temporary. */
11271 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
11272 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
11273 *expr_p = build_simple_mem_ref (tmp);
11275 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
11277 /* An rvalue will do. Assign the gimplified expression into a
11278 new temporary TMP and replace the original expression with
11279 TMP. First, make sure that the expression has a type so that
11280 it can be assigned into a temporary. */
11281 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
11282 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
11284 else
11286 #ifdef ENABLE_GIMPLE_CHECKING
11287 if (!(fallback & fb_mayfail))
11289 fprintf (stderr, "gimplification failed:\n");
11290 print_generic_expr (stderr, *expr_p, 0);
11291 debug_tree (*expr_p);
11292 internal_error ("gimplification failed");
11294 #endif
11295 gcc_assert (fallback & fb_mayfail);
11297 /* If this is an asm statement, and the user asked for the
11298 impossible, don't die. Fail and let gimplify_asm_expr
11299 issue an error. */
11300 ret = GS_ERROR;
11301 goto out;
11304 /* Make sure the temporary matches our predicate. */
11305 gcc_assert ((*gimple_test_f) (*expr_p));
11307 if (!gimple_seq_empty_p (internal_post))
11309 annotate_all_with_location (internal_post, input_location);
11310 gimplify_seq_add_seq (pre_p, internal_post);
11313 out:
11314 input_location = saved_location;
11315 return ret;
11318 /* Like gimplify_expr but make sure the gimplified result is not itself
11319 a SSA name (but a decl if it were). Temporaries required by
11320 evaluating *EXPR_P may be still SSA names. */
11322 static enum gimplify_status
11323 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11324 bool (*gimple_test_f) (tree), fallback_t fallback,
11325 bool allow_ssa)
11327 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
11328 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
11329 gimple_test_f, fallback);
11330 if (! allow_ssa
11331 && TREE_CODE (*expr_p) == SSA_NAME)
11333 tree name = *expr_p;
11334 if (was_ssa_name_p)
11335 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
11336 else
11338 /* Avoid the extra copy if possible. */
11339 *expr_p = create_tmp_reg (TREE_TYPE (name));
11340 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
11341 release_ssa_name (name);
11344 return ret;
11347 /* Look through TYPE for variable-sized objects and gimplify each such
11348 size that we find. Add to LIST_P any statements generated. */
11350 void
11351 gimplify_type_sizes (tree type, gimple_seq *list_p)
11353 tree field, t;
11355 if (type == NULL || type == error_mark_node)
11356 return;
11358 /* We first do the main variant, then copy into any other variants. */
11359 type = TYPE_MAIN_VARIANT (type);
11361 /* Avoid infinite recursion. */
11362 if (TYPE_SIZES_GIMPLIFIED (type))
11363 return;
11365 TYPE_SIZES_GIMPLIFIED (type) = 1;
11367 switch (TREE_CODE (type))
11369 case INTEGER_TYPE:
11370 case ENUMERAL_TYPE:
11371 case BOOLEAN_TYPE:
11372 case REAL_TYPE:
11373 case FIXED_POINT_TYPE:
11374 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11375 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
11377 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11379 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11380 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
11382 break;
11384 case ARRAY_TYPE:
11385 /* These types may not have declarations, so handle them here. */
11386 gimplify_type_sizes (TREE_TYPE (type), list_p);
11387 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
11388 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
11389 with assigned stack slots, for -O1+ -g they should be tracked
11390 by VTA. */
11391 if (!(TYPE_NAME (type)
11392 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11393 && DECL_IGNORED_P (TYPE_NAME (type)))
11394 && TYPE_DOMAIN (type)
11395 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11397 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11398 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11399 DECL_IGNORED_P (t) = 0;
11400 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11401 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11402 DECL_IGNORED_P (t) = 0;
11404 break;
11406 case RECORD_TYPE:
11407 case UNION_TYPE:
11408 case QUAL_UNION_TYPE:
11409 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11410 if (TREE_CODE (field) == FIELD_DECL)
11412 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
11413 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11414 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
11415 gimplify_type_sizes (TREE_TYPE (field), list_p);
11417 break;
11419 case POINTER_TYPE:
11420 case REFERENCE_TYPE:
11421 /* We used to recurse on the pointed-to type here, which turned out to
11422 be incorrect because its definition might refer to variables not
11423 yet initialized at this point if a forward declaration is involved.
11425 It was actually useful for anonymous pointed-to types to ensure
11426 that the sizes evaluation dominates every possible later use of the
11427 values. Restricting to such types here would be safe since there
11428 is no possible forward declaration around, but would introduce an
11429 undesirable middle-end semantic to anonymity. We then defer to
11430 front-ends the responsibility of ensuring that the sizes are
11431 evaluated both early and late enough, e.g. by attaching artificial
11432 type declarations to the tree. */
11433 break;
11435 default:
11436 break;
11439 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11440 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
11442 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11444 TYPE_SIZE (t) = TYPE_SIZE (type);
11445 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11446 TYPE_SIZES_GIMPLIFIED (t) = 1;
11450 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11451 a size or position, has had all of its SAVE_EXPRs evaluated.
11452 We add any required statements to *STMT_P. */
11454 void
11455 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
11457 tree expr = *expr_p;
11459 /* We don't do anything if the value isn't there, is constant, or contains
11460 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
11461 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
11462 will want to replace it with a new variable, but that will cause problems
11463 if this type is from outside the function. It's OK to have that here. */
11464 if (is_gimple_sizepos (expr))
11465 return;
11467 *expr_p = unshare_expr (expr);
11469 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
11470 if the def vanishes. */
11471 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* A gimplify context must not already be active; push one now and pop
     it at the end (see pop_gimplify_context below).  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Only nested functions (those with an origin) need the nonlocal-VLA
     bookkeeping set below.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An entirely empty body still needs one statement to hang the
	 GIMPLE_BIND below off of.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; clear it so it can be reclaimed.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Flush the nonlocal-VLA bookkeeping allocated above (only set for
     nested functions).  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OMP context created for an "omp declare target"
     function above (or left over from gimplifying the body).  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
11601 typedef char *char_p; /* For DEF_VEC_P. */
11603 /* Return whether we should exclude FNDECL from instrumentation. */
11605 static bool
11606 flag_instrument_functions_exclude_p (tree fndecl)
11608 vec<char_p> *v;
11610 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11611 if (v && v->length () > 0)
11613 const char *name;
11614 int i;
11615 char *s;
11617 name = lang_hooks.decl_printable_name (fndecl, 0);
11618 FOR_EACH_VEC_ELT (*v, i, s)
11619 if (strstr (name, s) != NULL)
11620 return true;
11623 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11624 if (v && v->length () > 0)
11626 const char *name;
11627 int i;
11628 char *s;
11630 name = DECL_SOURCE_FILE (fndecl);
11631 FOR_EACH_VEC_ELT (*v, i, s)
11632 if (strstr (name, s) != NULL)
11633 return true;
11636 return false;
11639 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
11640 node for the function we want to gimplify.
11642 Return the sequence of GIMPLE statements corresponding to the body
11643 of FNDECL. */
11645 void
11646 gimplify_function_tree (tree fndecl)
11648 tree parm, ret;
11649 gimple_seq seq;
11650 gbind *bind;
11652 gcc_assert (!gimple_body (fndecl));
11654 if (DECL_STRUCT_FUNCTION (fndecl))
11655 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11656 else
11657 push_struct_function (fndecl);
11659 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11660 if necessary. */
11661 cfun->curr_properties |= PROP_gimple_lva;
11663 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
11665 /* Preliminarily mark non-addressed complex variables as eligible
11666 for promotion to gimple registers. We'll transform their uses
11667 as we find them. */
11668 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11669 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
11670 && !TREE_THIS_VOLATILE (parm)
11671 && !needs_to_live_in_memory (parm))
11672 DECL_GIMPLE_REG_P (parm) = 1;
11675 ret = DECL_RESULT (fndecl);
11676 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
11677 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
11678 && !needs_to_live_in_memory (ret))
11679 DECL_GIMPLE_REG_P (ret) = 1;
11681 bind = gimplify_body (fndecl, true);
11683 /* The tree body of the function is no longer needed, replace it
11684 with the new GIMPLE body. */
11685 seq = NULL;
11686 gimple_seq_add_stmt (&seq, bind);
11687 gimple_set_body (fndecl, seq);
11689 /* If we're instrumenting function entry/exit, then prepend the call to
11690 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11691 catch the exit hook. */
11692 /* ??? Add some way to ignore exceptions for this TFE. */
11693 if (flag_instrument_function_entry_exit
11694 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11695 && !flag_instrument_functions_exclude_p (fndecl))
11697 tree x;
11698 gbind *new_bind;
11699 gimple *tf;
11700 gimple_seq cleanup = NULL, body = NULL;
11701 tree tmp_var;
11702 gcall *call;
11704 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11705 call = gimple_build_call (x, 1, integer_zero_node);
11706 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11707 gimple_call_set_lhs (call, tmp_var);
11708 gimplify_seq_add_stmt (&cleanup, call);
11709 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
11710 call = gimple_build_call (x, 2,
11711 build_fold_addr_expr (current_function_decl),
11712 tmp_var);
11713 gimplify_seq_add_stmt (&cleanup, call);
11714 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
11716 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11717 call = gimple_build_call (x, 1, integer_zero_node);
11718 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11719 gimple_call_set_lhs (call, tmp_var);
11720 gimplify_seq_add_stmt (&body, call);
11721 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
11722 call = gimple_build_call (x, 2,
11723 build_fold_addr_expr (current_function_decl),
11724 tmp_var);
11725 gimplify_seq_add_stmt (&body, call);
11726 gimplify_seq_add_stmt (&body, tf);
11727 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
11728 /* Clear the block for BIND, since it is no longer directly inside
11729 the function, but within a try block. */
11730 gimple_bind_set_block (bind, NULL);
11732 /* Replace the current function body with the body
11733 wrapped in the try/finally TF. */
11734 seq = NULL;
11735 gimple_seq_add_stmt (&seq, new_bind);
11736 gimple_set_body (fndecl, seq);
11737 bind = new_bind;
11740 if ((flag_sanitize & SANITIZE_THREAD) != 0
11741 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
11743 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
11744 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
11745 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11746 /* Clear the block for BIND, since it is no longer directly inside
11747 the function, but within a try block. */
11748 gimple_bind_set_block (bind, NULL);
11749 /* Replace the current function body with the body
11750 wrapped in the try/finally TF. */
11751 seq = NULL;
11752 gimple_seq_add_stmt (&seq, new_bind);
11753 gimple_set_body (fndecl, seq);
11756 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11757 cfun->curr_properties |= PROP_gimple_any;
11759 pop_cfun ();
11761 dump_function (TDI_generic, fndecl);
11764 /* Return a dummy expression of type TYPE in order to keep going after an
11765 error. */
11767 static tree
11768 dummy_object (tree type)
11770 tree t = build_int_cst (build_pointer_type (type), 0);
11771 return build2 (MEM_REF, type, t, t);
11774 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11775 builtin function, but a very special sort of operator. */
11777 enum gimplify_status
11778 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11779 gimple_seq *post_p ATTRIBUTE_UNUSED)
11781 tree promoted_type, have_va_type;
11782 tree valist = TREE_OPERAND (*expr_p, 0);
11783 tree type = TREE_TYPE (*expr_p);
11784 tree t, tag, aptag;
11785 location_t loc = EXPR_LOCATION (*expr_p);
11787 /* Verify that valist is of the proper type. */
11788 have_va_type = TREE_TYPE (valist);
11789 if (have_va_type == error_mark_node)
11790 return GS_ERROR;
11791 have_va_type = targetm.canonical_va_list_type (have_va_type);
11793 if (have_va_type == NULL_TREE)
11795 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
11796 return GS_ERROR;
11799 /* Generate a diagnostic for requesting data of a type that cannot
11800 be passed through `...' due to type promotion at the call site. */
11801 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11802 != type)
11804 static bool gave_help;
11805 bool warned;
11806 /* Use the expansion point to handle cases such as passing bool (defined
11807 in a system header) through `...'. */
11808 source_location xloc
11809 = expansion_point_location_if_in_system_header (loc);
11811 /* Unfortunately, this is merely undefined, rather than a constraint
11812 violation, so we cannot make this an error. If this call is never
11813 executed, the program is still strictly conforming. */
11814 warned = warning_at (xloc, 0,
11815 "%qT is promoted to %qT when passed through %<...%>",
11816 type, promoted_type);
11817 if (!gave_help && warned)
11819 gave_help = true;
11820 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
11821 promoted_type, type);
11824 /* We can, however, treat "undefined" any way we please.
11825 Call abort to encourage the user to fix the program. */
11826 if (warned)
11827 inform (xloc, "if this code is reached, the program will abort");
11828 /* Before the abort, allow the evaluation of the va_list
11829 expression to exit or longjmp. */
11830 gimplify_and_add (valist, pre_p);
11831 t = build_call_expr_loc (loc,
11832 builtin_decl_implicit (BUILT_IN_TRAP), 0);
11833 gimplify_and_add (t, pre_p);
11835 /* This is dead code, but go ahead and finish so that the
11836 mode of the result comes out right. */
11837 *expr_p = dummy_object (type);
11838 return GS_ALL_DONE;
11841 tag = build_int_cst (build_pointer_type (type), 0);
11842 aptag = build_int_cst (TREE_TYPE (valist), 0);
11844 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
11845 valist, tag, aptag);
11847 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
11848 needs to be expanded. */
11849 cfun->curr_properties &= ~PROP_gimple_lva;
11851 return GS_OK;
11854 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11856 DST/SRC are the destination and source respectively. You can pass
11857 ungimplified trees in DST or SRC, in which case they will be
11858 converted to a gimple operand if necessary.
11860 This function returns the newly created GIMPLE_ASSIGN tuple. */
11862 gimple *
11863 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11865 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11866 gimplify_and_add (t, seq_p);
11867 ggc_free (t);
11868 return gimple_seq_last_stmt (*seq_p);
11871 inline hashval_t
11872 gimplify_hasher::hash (const elt_t *p)
11874 tree t = p->val;
11875 return iterative_hash_expr (t, 0);
11878 inline bool
11879 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11881 tree t1 = p1->val;
11882 tree t2 = p2->val;
11883 enum tree_code code = TREE_CODE (t1);
11885 if (TREE_CODE (t2) != code
11886 || TREE_TYPE (t1) != TREE_TYPE (t2))
11887 return false;
11889 if (!operand_equal_p (t1, t2, 0))
11890 return false;
11892 /* Only allow them to compare equal if they also hash equal; otherwise
11893 results are nondeterminate, and we fail bootstrap comparison. */
11894 gcc_checking_assert (hash (p1) == hash (p2));
11896 return true;